diff --git "a/raw/code_summarization_gh_2023.jsonl" "b/raw/code_summarization_gh_2023.jsonl" new file mode 100644--- /dev/null +++ "b/raw/code_summarization_gh_2023.jsonl" @@ -0,0 +1,2000 @@ +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "between_residue_clash_loss", "code": "def between_residue_clash_loss(\n atom23_pred_positions: torch.Tensor,\n atom23_atom_exists: torch.Tensor,\n atom23_atom_radius: torch.Tensor,\n residue_index: torch.Tensor,\n overlap_tolerance_soft=1.5,\n overlap_tolerance_hard=1.5,\n eps=1e-10,\n) -> Dict[str, torch.Tensor]:\n \"\"\"Loss to penalize steric clashes between residues.\n\n This is a loss penalizing any steric clashes due to non bonded atoms in\n different peptides coming too close. This loss corresponds to the part with\n different residues of\n Jumper et al. (2021) Suppl. Sec. 1.9.11, eq 46.\n\n Args:\n atom14_pred_positions: Predicted positions of atoms in\n global prediction frame\n atom14_atom_exists: Mask denoting whether atom at positions exists for given\n amino acid type\n atom14_atom_radius: Van der Waals radius for each atom.\n residue_index: Residue index for given amino acid.\n overlap_tolerance_soft: Soft tolerance factor.\n overlap_tolerance_hard: Hard tolerance factor.\n\n Returns:\n Dict containing:\n * 'mean_loss': average clash loss\n * 'per_atom_loss_sum': sum of all clash losses per atom, shape (N, 23)\n * 'per_atom_clash_mask': mask whether atom clashes with any other atom\n shape (N, 23)\n \"\"\"\n fp_type = atom23_pred_positions.dtype\n\n # Create the distance matrix.\n # (N, N, 23, 23)\n dists = torch.sqrt(\n eps\n + torch.sum(\n (\n atom23_pred_positions[..., :, None, :, None, :]\n - atom23_pred_positions[..., None, :, None, :, :]\n )\n ** 2,\n dim=-1,\n )\n )\n\n # Create the mask for valid distances.\n # shape (N, N, 23, 23)\n dists_mask = (\n atom23_atom_exists[..., :, None, :, None]\n * atom23_atom_exists[..., None, :, None, :]\n ).type(fp_type)\n\n # Mask out all the duplicate entries in the lower triangular matrix.\n # Also mask out the diagonal (atom-pairs from the same residue) -- these atoms\n # are handled separately.\n dists_mask = dists_mask * (\n residue_index[..., :, None, None, None]\n < residue_index[..., None, :, None, None]\n )\n\n # Backbone O3'--P bond between subsequent residues is no clash.\n o3_one_hot = torch.nn.functional.one_hot(\n residue_index.new_tensor(6), num_classes=23\n )\n o3_one_hot = o3_one_hot.reshape(\n *((1,) * len(residue_index.shape[:-1])), *o3_one_hot.shape\n )\n o3_one_hot = o3_one_hot.type(fp_type)\n p_one_hot = torch.nn.functional.one_hot(\n residue_index.new_tensor(8), num_classes=23\n )\n p_one_hot = p_one_hot.reshape(\n *((1,) * len(residue_index.shape[:-1])), *p_one_hot.shape\n )\n p_one_hot = p_one_hot.type(fp_type)\n\n neighbour_mask = (\n residue_index[..., :, None, None, None] + 1\n ) == residue_index[..., None, :, None, None]\n o3_p_bonds = (\n neighbour_mask\n * o3_one_hot[..., None, None, :, None]\n * p_one_hot[..., None, None, None, :]\n )\n dists_mask = dists_mask * (1.0 - o3_p_bonds)\n\n # Compute the lower bound for the allowed distances.\n # shape (N, N, 23, 23)\n dists_lower_bound = dists_mask * (\n atom23_atom_radius[..., :, None, :, None]\n + atom23_atom_radius[..., None, :, None, :]\n )\n\n # Compute the error.\n # shape (N, N, 23, 23)\n dists_to_low_error = dists_mask * torch.nn.functional.relu(\n dists_lower_bound - overlap_tolerance_soft - dists\n )\n\n # Compute the mean loss.\n # shape ()\n mean_loss = 
torch.sum(dists_to_low_error) / (1e-6 + torch.sum(dists_mask))\n\n # Compute the per atom loss sum.\n # shape (N, 23)\n per_atom_loss_sum = torch.sum(dists_to_low_error, dim=(-4, -2)) + torch.sum(\n dists_to_low_error, axis=(-3, -1)\n )\n\n # Compute the hard clash mask.\n # shape (N, N, 23, 23)\n clash_mask = dists_mask * (\n dists < (dists_lower_bound - overlap_tolerance_hard)\n )\n\n # Compute the per atom clash.\n # shape (N, 23)\n per_atom_clash_mask = torch.maximum(\n torch.amax(clash_mask, axis=(-4, -2)),\n torch.amax(clash_mask, axis=(-3, -1)),\n )\n\n return {\n \"mean_loss\": mean_loss, # shape ()\n \"per_atom_loss_sum\": per_atom_loss_sum, # shape (N, 23)\n \"per_atom_clash_mask\": per_atom_clash_mask, # shape (N, 23)\n }", "docstring": "\"\"\"Loss to penalize steric clashes between residues.\n\n This is a loss penalizing any steric clashes due to non bonded atoms in\n different peptides coming too close. This loss corresponds to the part with\n different residues of\n Jumper et al. (2021) Suppl. Sec. 1.9.11, eq 46.\n\n Args:\n atom14_pred_positions: Predicted positions of atoms in\n global prediction frame\n atom14_atom_exists: Mask denoting whether atom at positions exists for given\n amino acid type\n atom14_atom_radius: Van der Waals radius for each atom.\n residue_index: Residue index for given amino acid.\n overlap_tolerance_soft: Soft tolerance factor.\n overlap_tolerance_hard: Hard tolerance factor.\n\n Returns:\n Dict containing:\n * 'mean_loss': average clash loss\n * 'per_atom_loss_sum': sum of all clash losses per atom, shape (N, 23)\n * 'per_atom_clash_mask': mask whether atom clashes with any other atom\n shape (N, 23)\n \"\"\"", "url": "https://github.com/baaihealth/OpenComplex/blob/ce0d5b97b154e992b7abb10403b2ad49f850ea6f/opencomplex/loss/loss_fns_rna.py#L180-L310", "sha": "ce0d5b97b154e992b7abb10403b2ad49f850ea6f", "code/function": "def between_residue_clash_loss(\n atom23_pred_positions: torch.Tensor,\n atom23_atom_exists: torch.Tensor,\n atom23_atom_radius: torch.Tensor,\n residue_index: torch.Tensor,\n overlap_tolerance_soft=1.5,\n overlap_tolerance_hard=1.5,\n eps=1e-10,\n) -> Dict[str, torch.Tensor]:\n \n fp_type = atom23_pred_positions.dtype\n\n # Create the distance matrix.\n # (N, N, 23, 23)\n dists = torch.sqrt(\n eps\n + torch.sum(\n (\n atom23_pred_positions[..., :, None, :, None, :]\n - atom23_pred_positions[..., None, :, None, :, :]\n )\n ** 2,\n dim=-1,\n )\n )\n\n # Create the mask for valid distances.\n # shape (N, N, 23, 23)\n dists_mask = (\n atom23_atom_exists[..., :, None, :, None]\n * atom23_atom_exists[..., None, :, None, :]\n ).type(fp_type)\n\n # Mask out all the duplicate entries in the lower triangular matrix.\n # Also mask out the diagonal (atom-pairs from the same residue) -- these atoms\n # are handled separately.\n dists_mask = dists_mask * (\n residue_index[..., :, None, None, None]\n < residue_index[..., None, :, None, None]\n )\n\n # Backbone O3'--P bond between subsequent residues is no clash.\n o3_one_hot = torch.nn.functional.one_hot(\n residue_index.new_tensor(6), num_classes=23\n )\n o3_one_hot = o3_one_hot.reshape(\n *((1,) * len(residue_index.shape[:-1])), *o3_one_hot.shape\n )\n o3_one_hot = o3_one_hot.type(fp_type)\n p_one_hot = torch.nn.functional.one_hot(\n residue_index.new_tensor(8), num_classes=23\n )\n p_one_hot = p_one_hot.reshape(\n *((1,) * len(residue_index.shape[:-1])), *p_one_hot.shape\n )\n p_one_hot = p_one_hot.type(fp_type)\n\n neighbour_mask = (\n residue_index[..., :, None, None, None] + 1\n ) == 
residue_index[..., None, :, None, None]\n o3_p_bonds = (\n neighbour_mask\n * o3_one_hot[..., None, None, :, None]\n * p_one_hot[..., None, None, None, :]\n )\n dists_mask = dists_mask * (1.0 - o3_p_bonds)\n\n # Compute the lower bound for the allowed distances.\n # shape (N, N, 23, 23)\n dists_lower_bound = dists_mask * (\n atom23_atom_radius[..., :, None, :, None]\n + atom23_atom_radius[..., None, :, None, :]\n )\n\n # Compute the error.\n # shape (N, N, 23, 23)\n dists_to_low_error = dists_mask * torch.nn.functional.relu(\n dists_lower_bound - overlap_tolerance_soft - dists\n )\n\n # Compute the mean loss.\n # shape ()\n mean_loss = torch.sum(dists_to_low_error) / (1e-6 + torch.sum(dists_mask))\n\n # Compute the per atom loss sum.\n # shape (N, 23)\n per_atom_loss_sum = torch.sum(dists_to_low_error, dim=(-4, -2)) + torch.sum(\n dists_to_low_error, axis=(-3, -1)\n )\n\n # Compute the hard clash mask.\n # shape (N, N, 23, 23)\n clash_mask = dists_mask * (\n dists < (dists_lower_bound - overlap_tolerance_hard)\n )\n\n # Compute the per atom clash.\n # shape (N, 23)\n per_atom_clash_mask = torch.maximum(\n torch.amax(clash_mask, axis=(-4, -2)),\n torch.amax(clash_mask, axis=(-3, -1)),\n )\n\n return {\n \"mean_loss\": mean_loss, # shape ()\n \"per_atom_loss_sum\": per_atom_loss_sum, # shape (N, 23)\n \"per_atom_clash_mask\": per_atom_clash_mask, # shape (N, 23)\n }"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "apply_parameter", "code": "def apply_parameter(self, *args: pycardano.Datum):\n \"\"\"\n Returns a new OpShin Contract with the applied parameters\n \"\"\"\n # update the parameters in the blueprint (remove applied parameters)\n assert len(self.parameter_types) >= len(\n args\n ), f\"Applying too many parameters to contract, allowed amount: {self.parameter_types}, but got {len(args)}\"\n new_parameter_types = copy.copy(self.parameter_types)\n for _ in args:\n # TODO validate that the applied parameters are of the correct type\n new_parameter_types.pop(0)\n new_contract_contract = apply_parameters(self.contract, *args)\n new_contract = PlutusContract(\n new_contract_contract,\n self.datum_type,\n self.redeemer_type,\n new_parameter_types,\n self.purpose,\n self.version,\n self.title,\n self.description,\n )\n return new_contract", "docstring": "\"\"\"\n Returns a new OpShin Contract with the applied parameters\n \"\"\"", "url": "https://github.com/OpShin/opshin/blob/d657a227f02670e6b6eed9cac77c0f8a25d51423/opshin/builder.py#L146-L169", "sha": "d657a227f02670e6b6eed9cac77c0f8a25d51423", "code/function": "def apply_parameter(self, *args: pycardano.Datum):\n \n # update the parameters in the blueprint (remove applied parameters)\n assert len(self.parameter_types) >= len(\n args\n ), f\"Applying too many parameters to contract, allowed amount: {self.parameter_types}, but got {len(args)}\"\n new_parameter_types = copy.copy(self.parameter_types)\n for _ in args:\n # TODO validate that the applied parameters are of the correct type\n new_parameter_types.pop(0)\n new_contract_contract = apply_parameters(self.contract, *args)\n new_contract = PlutusContract(\n new_contract_contract,\n self.datum_type,\n self.redeemer_type,\n new_parameter_types,\n self.purpose,\n self.version,\n self.title,\n self.description,\n )\n return new_contract"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LM.loglikelihood_rolling", "code": "@abc.abstractmethod\n def loglikelihood_rolling(self, requests):\n \"\"\"Compute full log-likelihood 
of a string, with no truncation, for perplexity computation\n - We will use the full max context length of the model.\n - For inputs that exceed the max context length, we divide the tokenized string into chunks of up to\n the max context length.\n - IMPORTANT: Each document's loglikelihood/perplexity is computed *separately*, unlike other implementations\n which may simply concatenate multiple documents together.\n - IMPORTANT: We maximize the amount of context for each prediction. Specifically, for inputs that we break into\n multiple chunks, the last input will still a full-sized context.\n Example:\n Input tokens: [ 0 1 2 3 4 5 6 7 8 9 ]\n Prefix: EOT\n Max context length: 4\n Resulting input/prediction pairs:\n\n INPUT: EOT 0 1 2\n PRED: 0 1 2 3\n\n INPUT: 3 4 5 6\n PRED: 4 5 6 7\n\n INPUT: 5 6 7 8\n PRED: 8 9\n\n Observe that:\n 1. Each token is predicted exactly once\n 2. For the last pair, we provide the full context, but only score the last two tokens\n\n :param requests: list\n A list of strings\n string: str\n String for which we are computing per-toke loglikelihood\n :return: list\n A list of pairs (logprob, isgreedy)\n logprob: float\n The log probability of `continuation`\n isgreedy:\n Whether `continuation` would be generated by greedy sampling from `context`\n \"\"\"\n pass", "docstring": "\"\"\"Compute full log-likelihood of a string, with no truncation, for perplexity computation\n - We will use the full max context length of the model.\n - For inputs that exceed the max context length, we divide the tokenized string into chunks of up to\n the max context length.\n - IMPORTANT: Each document's loglikelihood/perplexity is computed *separately*, unlike other implementations\n which may simply concatenate multiple documents together.\n - IMPORTANT: We maximize the amount of context for each prediction. Specifically, for inputs that we break into\n multiple chunks, the last input will still a full-sized context.\n Example:\n Input tokens: [ 0 1 2 3 4 5 6 7 8 9 ]\n Prefix: EOT\n Max context length: 4\n Resulting input/prediction pairs:\n\n INPUT: EOT 0 1 2\n PRED: 0 1 2 3\n\n INPUT: 3 4 5 6\n PRED: 4 5 6 7\n\n INPUT: 5 6 7 8\n PRED: 8 9\n\n Observe that:\n 1. Each token is predicted exactly once\n 2. 
For the last pair, we provide the full context, but only score the last two tokens\n\n :param requests: list\n A list of strings\n string: str\n String for which we are computing per-toke loglikelihood\n :return: list\n A list of pairs (logprob, isgreedy)\n logprob: float\n The log probability of `continuation`\n isgreedy:\n Whether `continuation` would be generated by greedy sampling from `context`\n \"\"\"", "url": "https://github.com/NVIDIA/NeMo-Framework-Launcher/blob/4abd481402adc8a061942c486eda6d71f19de718/launcher_scripts/nemo_launcher/collections/eval_harness/lm_eval/base.py#L64-L104", "sha": "4abd481402adc8a061942c486eda6d71f19de718", "code/function": "@abc.abstractmethod\n def loglikelihood_rolling(self, requests):\n \n pass"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Backtest_TestCase._test", "code": "@abc.abstractmethod\n def _test(self, *args: Any, **kwargs: Any) -> None:\n \"\"\"\n Run the entire flow.\n \"\"\"", "docstring": "\"\"\"\n Run the entire flow.\n \"\"\"", "url": "https://github.com/causify-ai/kaizenflow/blob/545f66ef6e6b0e5109602dbf1938ef668c55750d/dataflow/backtest/backtest_test_case.py#L123-L127", "sha": "545f66ef6e6b0e5109602dbf1938ef668c55750d", "code/function": "@abc.abstractmethod\n def _test(self, *args: Any, **kwargs: Any) -> None:"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_forecast_evaluator", "code": "def get_forecast_evaluator(\n forecast_evaluator_class_name: str, **kwargs: Dict[str, Any]\n) -> dtfmabfoev.AbstractForecastEvaluator:\n \"\"\"\n Get the forecast evaluator for the backtest analysis.\n\n :param forecast_evaluator_class_name: name of the ForecastEvaluator\n as str, e.g. \"ForecastEvaluatorFromPrices\",\n \"ForecastEvaluatorWithOptimizer\" :param **kwargs: kwargs for\n ctor of the provided ForecastEvaluator class\n :return: ForecastEvaluator object\n \"\"\"\n # Choose the class based on the label.\n if forecast_evaluator_class_name == \"ForecastEvaluatorFromPrices\":\n forecast_evaluator_class = dtfmfefrpr.ForecastEvaluatorFromPrices\n #\n elif forecast_evaluator_class_name == \"ForecastEvaluatorWithOptimizer\":\n forecast_evaluator_class = ofevwiop.ForecastEvaluatorWithOptimizer\n #\n else:\n raise ValueError(\n f\"Unsupported forecast_evaluator_class_name: {forecast_evaluator_class_name}\"\n )\n # Construct the object.\n forecast_evaluator = forecast_evaluator_class(**kwargs)\n return forecast_evaluator", "docstring": "\"\"\"\n Get the forecast evaluator for the backtest analysis.\n\n :param forecast_evaluator_class_name: name of the ForecastEvaluator\n as str, e.g. 
\"ForecastEvaluatorFromPrices\",\n \"ForecastEvaluatorWithOptimizer\" :param **kwargs: kwargs for\n ctor of the provided ForecastEvaluator class\n :return: ForecastEvaluator object\n \"\"\"", "url": "https://github.com/causify-ai/kaizenflow/blob/545f66ef6e6b0e5109602dbf1938ef668c55750d/dataflow/model/backtest_notebook_utils.py#L138-L163", "sha": "545f66ef6e6b0e5109602dbf1938ef668c55750d", "code/function": "def get_forecast_evaluator(\n forecast_evaluator_class_name: str, **kwargs: Dict[str, Any]\n) -> dtfmabfoev.AbstractForecastEvaluator:\n \n # Choose the class based on the label.\n if forecast_evaluator_class_name == \"ForecastEvaluatorFromPrices\":\n forecast_evaluator_class = dtfmfefrpr.ForecastEvaluatorFromPrices\n #\n elif forecast_evaluator_class_name == \"ForecastEvaluatorWithOptimizer\":\n forecast_evaluator_class = ofevwiop.ForecastEvaluatorWithOptimizer\n #\n else:\n raise ValueError(\n f\"Unsupported forecast_evaluator_class_name: {forecast_evaluator_class_name}\"\n )\n # Construct the object.\n forecast_evaluator = forecast_evaluator_class(**kwargs)\n return forecast_evaluator"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "to_typed_csv", "code": "def to_typed_csv(df: pd.DataFrame, file_name: str) -> str:\n \"\"\"\n Convert df into CSV and creates a file with the dtypes of columns.\n\n This function creates a file containing the types with the same name\n and suffix e.g., `foobar.csv.types`.\n \"\"\"\n # Save the types.\n dtypes_filename = file_name + \".types\"\n hio.create_enclosing_dir(dtypes_filename, incremental=True)\n dtypes_dict = str(df.dtypes.apply(lambda x: x.name).to_dict())\n # Save the data.\n df.to_csv(file_name, index=False)\n with open(dtypes_filename, \"w\") as dtypes_file:\n dtypes_file.write(dtypes_dict)\n return dtypes_filename", "docstring": "\"\"\"\n Convert df into CSV and creates a file with the dtypes of columns.\n\n This function creates a file containing the types with the same name\n and suffix e.g., `foobar.csv.types`.\n \"\"\"", "url": "https://github.com/causify-ai/kaizenflow/blob/545f66ef6e6b0e5109602dbf1938ef668c55750d/helpers/hcsv.py#L350-L365", "sha": "545f66ef6e6b0e5109602dbf1938ef668c55750d", "code/function": "def to_typed_csv(df: pd.DataFrame, file_name: str) -> str:\n \n # Save the types.\n dtypes_filename = file_name + \".types\"\n hio.create_enclosing_dir(dtypes_filename, incremental=True)\n dtypes_dict = str(df.dtypes.apply(lambda x: x.name).to_dict())\n # Save the data.\n df.to_csv(file_name, index=False)\n with open(dtypes_filename, \"w\") as dtypes_file:\n dtypes_file.write(dtypes_dict)\n return dtypes_filename"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "purify_from_environment", "code": "def purify_from_environment(txt: str) -> str:\n \"\"\"\n Replace environment variables with placeholders.\n\n The performed transformations are:\n 1. Replace the Git path with `$GIT_ROOT`\n 2. Replace the path of current working dir with `$PWD`\n 3. 
Replace the current user name with `$USER_NAME`\n \"\"\"\n # 1) Remove references to Git modules starting from the innermost one.\n # Make sure that the path is not followed by a word character.\n # E.g., `/app/test.txt` is the correct path, while `/application.py`\n # is not a root path even though `/app` is the part of the text.\n dir_pattern = r\"(?![\\w])\"\n for super_module in [False, True]:\n # Replace the git path with `$GIT_ROOT`.\n super_module_path = hgit.get_client_root(super_module=super_module)\n if super_module_path != \"/\":\n pattern = re.compile(f\"{super_module_path}{dir_pattern}\")\n txt = pattern.sub(\"$GIT_ROOT\", txt)\n else:\n # If the git path is `/` then we don't need to do anything.\n pass\n # 2) Replace the path of current working dir with `$PWD`\n pwd = os.getcwd()\n pattern = re.compile(f\"{pwd}{dir_pattern}\")\n txt = pattern.sub(\"$PWD\", txt)\n # 3) Replace the current user name with `$USER_NAME`.\n user_name = hsystem.get_user_name()\n # Set a regex pattern that finds a user name surrounded by dot, dash or space.\n # E.g., `IMAGE=$CK_ECR_BASE_PATH/amp_test:local-$USER_NAME-1.0.0`,\n # `--name $USER_NAME.amp_test.app.app`, `run --rm -l user=$USER_NAME`.\n pattern = rf\"([\\s\\n\\-\\.\\=]|^)+{user_name}+([.\\s/-]|$)\"\n # Use `\\1` and `\\2` to preserve specific characters around `$USER_NAME`.\n target = r\"\\1$USER_NAME\\2\"\n txt = re.sub(pattern, target, txt)\n _LOG.debug(\"After %s: txt='\\n%s'\", hintros.get_function_name(), txt)\n return txt", "docstring": "\"\"\"\n Replace environment variables with placeholders.\n\n The performed transformations are:\n 1. Replace the Git path with `$GIT_ROOT`\n 2. Replace the path of current working dir with `$PWD`\n 3. Replace the current user name with `$USER_NAME`\n \"\"\"", "url": "https://github.com/causify-ai/kaizenflow/blob/545f66ef6e6b0e5109602dbf1938ef668c55750d/helpers/hunit_test.py#L372-L409", "sha": "545f66ef6e6b0e5109602dbf1938ef668c55750d", "code/function": "def purify_from_environment(txt: str) -> str:\n \n # 1) Remove references to Git modules starting from the innermost one.\n # Make sure that the path is not followed by a word character.\n # E.g., `/app/test.txt` is the correct path, while `/application.py`\n # is not a root path even though `/app` is the part of the text.\n dir_pattern = r\"(?![\\w])\"\n for super_module in [False, True]:\n # Replace the git path with `$GIT_ROOT`.\n super_module_path = hgit.get_client_root(super_module=super_module)\n if super_module_path != \"/\":\n pattern = re.compile(f\"{super_module_path}{dir_pattern}\")\n txt = pattern.sub(\"$GIT_ROOT\", txt)\n else:\n # If the git path is `/` then we don't need to do anything.\n pass\n # 2) Replace the path of current working dir with `$PWD`\n pwd = os.getcwd()\n pattern = re.compile(f\"{pwd}{dir_pattern}\")\n txt = pattern.sub(\"$PWD\", txt)\n # 3) Replace the current user name with `$USER_NAME`.\n user_name = hsystem.get_user_name()\n # Set a regex pattern that finds a user name surrounded by dot, dash or space.\n # E.g., `IMAGE=$CK_ECR_BASE_PATH/amp_test:local-$USER_NAME-1.0.0`,\n # `--name $USER_NAME.amp_test.app.app`, `run --rm -l user=$USER_NAME`.\n pattern = rf\"([\\s\\n\\-\\.\\=]|^)+{user_name}+([.\\s/-]|$)\"\n # Use `\\1` and `\\2` to preserve specific characters around `$USER_NAME`.\n target = r\"\\1$USER_NAME\\2\"\n txt = re.sub(pattern, target, txt)\n _LOG.debug(\"After %s: txt='\\n%s'\", hintros.get_function_name(), txt)\n return txt"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", 
"func_name": "_get_docker_compose_cmd", "code": "def _get_docker_compose_cmd(\n base_image: str,\n stage: str,\n version: str,\n cmd: str,\n *,\n # TODO(gp): make these params mandatory.\n extra_env_vars: Optional[List[str]] = None,\n extra_docker_compose_files: Optional[List[str]] = None,\n extra_docker_run_opts: Optional[List[str]] = None,\n service_name: str = \"app\",\n entrypoint: bool = True,\n generate_docker_compose_file: bool = True,\n as_user: bool = True,\n print_docker_config: bool = False,\n use_bash: bool = False,\n) -> str:\n \"\"\"\n Get `docker-compose` run command.\n\n E.g.,\n ```\n IMAGE=*****..dkr.ecr.us-east-1.amazonaws.com/amp:dev \\\n docker-compose \\\n --file /amp/devops/compose/docker-compose.yml \\\n --env-file devops/env/default.env \\\n run \\\n --rm \\\n --name grisha.cmamp.app.cmamp1.20220317_232120 \\\n --user $(id -u):$(id -g) \\\n app \\\n bash\n ```\n :param cmd: command to run inside Docker container\n :param extra_docker_run_opts: additional `docker-compose` run options\n :param service_name: service to use to run a command\n :param entrypoint: whether to use the `entrypoint` or not\n :param generate_docker_compose_file: generate the Docker compose file or not\n :param as_user: pass the user / group id or not\n :param print_docker_config: print the docker config for debugging purposes\n :param use_bash: run command through a shell\n \"\"\"\n hprint.log(\n _LOG,\n logging.DEBUG,\n \"cmd extra_docker_run_opts service_name \"\n \"entrypoint as_user print_docker_config use_bash\",\n )\n # - Get the base Docker command.\n docker_cmd_ = _get_docker_base_cmd(\n base_image,\n stage,\n version,\n service_name,\n generate_docker_compose_file,\n extra_env_vars,\n extra_docker_compose_files,\n )\n # - Add the `config` command for debugging purposes.\n docker_config_cmd: List[str] = docker_cmd_[:]\n docker_config_cmd.append(\n r\"\"\"\n config\"\"\"\n )\n # - Add the `run` command.\n docker_cmd_.append(\n r\"\"\"\n run \\\n --rm\"\"\"\n )\n # - Add a name to the container.\n container_name = _get_container_name(service_name)\n docker_cmd_.append(\n rf\"\"\"\n --name {container_name}\"\"\"\n )\n # - Handle the user.\n as_user = _run_docker_as_user(as_user)\n if as_user:\n docker_cmd_.append(\n r\"\"\"\n --user $(id -u):$(id -g)\"\"\"\n )\n # - Handle the extra docker options.\n if extra_docker_run_opts:\n hdbg.dassert_isinstance(extra_docker_run_opts, list)\n extra_opts = \" \".join(extra_docker_run_opts)\n docker_cmd_.append(\n rf\"\"\"\n {extra_opts}\"\"\"\n )\n # - Handle entrypoint.\n if entrypoint:\n docker_cmd_.append(\n rf\"\"\"\n {service_name}\"\"\"\n )\n if cmd:\n if use_bash:\n cmd = f\"bash -c '{cmd}'\"\n docker_cmd_.append(\n rf\"\"\"\n {cmd}\"\"\"\n )\n else:\n # No entrypoint.\n docker_cmd_.append(\n rf\"\"\"\n --entrypoint bash \\\n {service_name}\"\"\"\n )\n # Print the config for debugging purpose.\n if print_docker_config:\n docker_config_cmd_as_str = hlitauti.to_multi_line_cmd(docker_config_cmd)\n _LOG.debug(\"docker_config_cmd=\\n%s\", docker_config_cmd_as_str)\n _LOG.debug(\n \"docker_config=\\n%s\",\n hsystem.system_to_string(docker_config_cmd_as_str)[1],\n )\n # Print the config for debugging purpose.\n docker_cmd_: str = hlitauti.to_multi_line_cmd(docker_cmd_)\n return docker_cmd_", "docstring": "\"\"\"\n Get `docker-compose` run command.\n\n E.g.,\n ```\n IMAGE=*****..dkr.ecr.us-east-1.amazonaws.com/amp:dev \\\n docker-compose \\\n --file /amp/devops/compose/docker-compose.yml \\\n --env-file devops/env/default.env \\\n run \\\n --rm \\\n 
--name grisha.cmamp.app.cmamp1.20220317_232120 \\\n --user $(id -u):$(id -g) \\\n app \\\n bash\n ```\n :param cmd: command to run inside Docker container\n :param extra_docker_run_opts: additional `docker-compose` run options\n :param service_name: service to use to run a command\n :param entrypoint: whether to use the `entrypoint` or not\n :param generate_docker_compose_file: generate the Docker compose file or not\n :param as_user: pass the user / group id or not\n :param print_docker_config: print the docker config for debugging purposes\n :param use_bash: run command through a shell\n \"\"\"", "url": "https://github.com/causify-ai/kaizenflow/blob/545f66ef6e6b0e5109602dbf1938ef668c55750d/helpers/lib_tasks_docker.py#L1133-L1254", "sha": "545f66ef6e6b0e5109602dbf1938ef668c55750d", "code/function": "def _get_docker_compose_cmd(\n base_image: str,\n stage: str,\n version: str,\n cmd: str,\n *,\n # TODO(gp): make these params mandatory.\n extra_env_vars: Optional[List[str]] = None,\n extra_docker_compose_files: Optional[List[str]] = None,\n extra_docker_run_opts: Optional[List[str]] = None,\n service_name: str = \"app\",\n entrypoint: bool = True,\n generate_docker_compose_file: bool = True,\n as_user: bool = True,\n print_docker_config: bool = False,\n use_bash: bool = False,\n) -> str:\n \n hprint.log(\n _LOG,\n logging.DEBUG,\n \"cmd extra_docker_run_opts service_name \"\n \"entrypoint as_user print_docker_config use_bash\",\n )\n # - Get the base Docker command.\n docker_cmd_ = _get_docker_base_cmd(\n base_image,\n stage,\n version,\n service_name,\n generate_docker_compose_file,\n extra_env_vars,\n extra_docker_compose_files,\n )\n # - Add the `config` command for debugging purposes.\n docker_config_cmd: List[str] = docker_cmd_[:]\n docker_config_cmd.append(\n r\"\"\"\n config\"\"\"\n )\n # - Add the `run` command.\n docker_cmd_.append(\n r\"\"\"\n run \\\n --rm\"\"\"\n )\n # - Add a name to the container.\n container_name = _get_container_name(service_name)\n docker_cmd_.append(\n rf\"\"\"\n --name {container_name}\"\"\"\n )\n # - Handle the user.\n as_user = _run_docker_as_user(as_user)\n if as_user:\n docker_cmd_.append(\n r\"\"\"\n --user $(id -u):$(id -g)\"\"\"\n )\n # - Handle the extra docker options.\n if extra_docker_run_opts:\n hdbg.dassert_isinstance(extra_docker_run_opts, list)\n extra_opts = \" \".join(extra_docker_run_opts)\n docker_cmd_.append(\n rf\"\"\"\n {extra_opts}\"\"\"\n )\n # - Handle entrypoint.\n if entrypoint:\n docker_cmd_.append(\n rf\"\"\"\n {service_name}\"\"\"\n )\n if cmd:\n if use_bash:\n cmd = f\"bash -c '{cmd}'\"\n docker_cmd_.append(\n rf\"\"\"\n {cmd}\"\"\"\n )\n else:\n # No entrypoint.\n docker_cmd_.append(\n rf\"\"\"\n --entrypoint bash \\\n {service_name}\"\"\"\n )\n # Print the config for debugging purpose.\n if print_docker_config:\n docker_config_cmd_as_str = hlitauti.to_multi_line_cmd(docker_config_cmd)\n _LOG.debug(\"docker_config_cmd=\\n%s\", docker_config_cmd_as_str)\n _LOG.debug(\n \"docker_config=\\n%s\",\n hsystem.system_to_string(docker_config_cmd_as_str)[1],\n )\n # Print the config for debugging purpose.\n docker_cmd_: str = hlitauti.to_multi_line_cmd(docker_cmd_)\n return docker_cmd_"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_apply_trimming", "code": "def _apply_trimming(self, df: pd.DataFrame) -> pd.DataFrame:\n \"\"\"\n Trim `df` according to ATH, weekends, missing data.\n\n :param df: as in `compute_portfolio()`\n :return: `df` trimmed down to:\n - required and possibly optional 
columns\n - \"active\" bars (bars where at least one instrument has an end-of-bar\n price)\n - first index with both a returns prediction and a volatility\n \"\"\"\n _LOG.debug(\"df.shape=%s\", str(df.shape))\n # Restrict to required columns.\n cols = [self._price_col, self._volatility_col, self._prediction_col]\n df = df[cols]\n active_index = cofinanc.infer_active_bars(df[self._price_col])\n # Drop rows with no prices (this is an approximate way to handle weekends,\n # market holidays, and shortened trading sessions).\n df = df.reindex(index=active_index)\n _LOG.debug(\"after active_index: df.shape=%s\", df.shape)\n # Drop indices with prices that precede any returns prediction or\n # volatility computation.\n first_valid_prediction_index = df[\n self._prediction_col\n ].first_valid_index()\n hdbg.dassert_is_not(first_valid_prediction_index, None)\n _LOG.debug(hprint.to_str(\"first_valid_prediction_index\"))\n #\n first_valid_volatility_index = df[\n self._volatility_col\n ].first_valid_index()\n hdbg.dassert_is_not(first_valid_volatility_index, None)\n _LOG.debug(hprint.to_str(\"first_valid_volatility_index\"))\n #\n first_valid_index = max(\n first_valid_prediction_index, first_valid_volatility_index\n )\n df = df.loc[first_valid_index:]\n _LOG.debug(\"df.shape=%s\", str(df.shape))\n _LOG.debug(\"trimmed df=\\n%s\", hpandas.df_to_str(df))\n return df", "docstring": "\"\"\"\n Trim `df` according to ATH, weekends, missing data.\n\n :param df: as in `compute_portfolio()`\n :return: `df` trimmed down to:\n - required and possibly optional columns\n - \"active\" bars (bars where at least one instrument has an end-of-bar\n price)\n - first index with both a returns prediction and a volatility\n \"\"\"", "url": "https://github.com/causify-ai/kaizenflow/blob/545f66ef6e6b0e5109602dbf1938ef668c55750d/optimizer/forecast_evaluator_with_optimizer.py#L285-L325", "sha": "545f66ef6e6b0e5109602dbf1938ef668c55750d", "code/function": "def _apply_trimming(self, df: pd.DataFrame) -> pd.DataFrame:\n \n _LOG.debug(\"df.shape=%s\", str(df.shape))\n # Restrict to required columns.\n cols = [self._price_col, self._volatility_col, self._prediction_col]\n df = df[cols]\n active_index = cofinanc.infer_active_bars(df[self._price_col])\n # Drop rows with no prices (this is an approximate way to handle weekends,\n # market holidays, and shortened trading sessions).\n df = df.reindex(index=active_index)\n _LOG.debug(\"after active_index: df.shape=%s\", df.shape)\n # Drop indices with prices that precede any returns prediction or\n # volatility computation.\n first_valid_prediction_index = df[\n self._prediction_col\n ].first_valid_index()\n hdbg.dassert_is_not(first_valid_prediction_index, None)\n _LOG.debug(hprint.to_str(\"first_valid_prediction_index\"))\n #\n first_valid_volatility_index = df[\n self._volatility_col\n ].first_valid_index()\n hdbg.dassert_is_not(first_valid_volatility_index, None)\n _LOG.debug(hprint.to_str(\"first_valid_volatility_index\"))\n #\n first_valid_index = max(\n first_valid_prediction_index, first_valid_volatility_index\n )\n df = df.loc[first_valid_index:]\n _LOG.debug(\"df.shape=%s\", str(df.shape))\n _LOG.debug(\"trimmed df=\\n%s\", hpandas.df_to_str(df))\n return df"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_github_create_issues_table_query", "code": "def get_github_create_issues_table_query() -> str:\n \"\"\"\n Get SQL query to create github_issues table.\n\n This table contains the data as it is downloaded.\n \"\"\"\n query = \"\"\"\n 
CREATE TABLE IF NOT EXISTS github_issues(\n id SERIAL PRIMARY KEY,\n number NUMERIC,\n title VARCHAR(500) NOT NULL,\n created_at TIMESTAMP,\n updated_at TIMESTAMP,\n closed_at TIMESTAMP,\n author_association VARCHAR(255),\n comments NUMERIC,\n body VARCHAR(50000) ,\n user_login VARCHAR(255) NOT NULL,\n user_id NUMERIC,\n Crypto_Name VARCHAR(255) NOT NULL,\n Extension VARCHAR(255) NOT NULL\n )\n\n \"\"\"\n return query", "docstring": "\"\"\"\n Get SQL query to create github_issues table.\n\n This table contains the data as it is downloaded.\n \"\"\"", "url": "https://github.com/causify-ai/kaizenflow/blob/545f66ef6e6b0e5109602dbf1938ef668c55750d/sorrentum_sandbox/spring2023/ml_projects/SorrIssue21_Team2_Implement_sandbox_for_GitHub_2/db_team2.py#L52-L76", "sha": "545f66ef6e6b0e5109602dbf1938ef668c55750d", "code/function": "def get_github_create_issues_table_query() -> str:\n \n query = \"\"\"\n CREATE TABLE IF NOT EXISTS github_issues(\n id SERIAL PRIMARY KEY,\n number NUMERIC,\n title VARCHAR(500) NOT NULL,\n created_at TIMESTAMP,\n updated_at TIMESTAMP,\n closed_at TIMESTAMP,\n author_association VARCHAR(255),\n comments NUMERIC,\n body VARCHAR(50000) ,\n user_login VARCHAR(255) NOT NULL,\n user_id NUMERIC,\n Crypto_Name VARCHAR(255) NOT NULL,\n Extension VARCHAR(255) NOT NULL\n )\n\n \"\"\"\n return query"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sample_dpmpp_2m", "code": "@torch.no_grad()\ndef sample_dpmpp_2m(\n model,\n x,\n sigmas,\n extra_args=None,\n callback=None,\n disable=None,\n warmup_lms=False,\n ddim_cutoff=0.0,\n):\n \"\"\"DPM-Solver++(2M).\"\"\"\n extra_args = {} if extra_args is None else extra_args\n s_in = x.new_ones([x.shape[0]])\n sigma_fn = lambda t: t.neg().exp()\n t_fn = lambda sigma: sigma.log().neg()\n old_denoised = None\n\n for i in trange(len(sigmas) - 1, disable=disable):\n denoised = model(x, sigmas[i] * s_in, **extra_args)\n if callback is not None:\n callback(\n {\n \"x\": x,\n \"i\": i,\n \"sigma\": sigmas[i],\n \"sigma_hat\": sigmas[i],\n \"denoised\": denoised,\n }\n )\n t, t_next = t_fn(sigmas[i]), t_fn(sigmas[i + 1])\n h = t_next - t\n if old_denoised is None and warmup_lms:\n r = 1 / 2\n s = t + r * h\n x_2 = (sigma_fn(s) / sigma_fn(t)) * x - (-h * r).expm1() * denoised\n denoised_i = model(x_2, sigma_fn(s) * s_in, **extra_args)\n elif sigmas[i + 1] <= ddim_cutoff or old_denoised is None:\n denoised_i = denoised\n else:\n h_last = t - t_fn(sigmas[i - 1])\n r = h_last / h\n denoised_i = (1 + 1 / (2 * r)) * denoised - (1 / (2 * r)) * old_denoised\n x = (sigma_fn(t_next) / sigma_fn(t)) * x - (-h).expm1() * denoised_i\n old_denoised = denoised\n return x", "docstring": "\"\"\"DPM-Solver++(2M).\"\"\"", "url": "https://github.com/stablecabal/gyre/blob/9cba9781cd458acb8b821f5dc584299cab1ed2f3/gyre/pipeline/schedulers/sample_dpmpp_2m.py#L5-L50", "sha": "9cba9781cd458acb8b821f5dc584299cab1ed2f3", "code/function": "@torch.no_grad()\ndef sample_dpmpp_2m(\n model,\n x,\n sigmas,\n extra_args=None,\n callback=None,\n disable=None,\n warmup_lms=False,\n ddim_cutoff=0.0,\n):\n \n extra_args = {} if extra_args is None else extra_args\n s_in = x.new_ones([x.shape[0]])\n sigma_fn = lambda t: t.neg().exp()\n t_fn = lambda sigma: sigma.log().neg()\n old_denoised = None\n\n for i in trange(len(sigmas) - 1, disable=disable):\n denoised = model(x, sigmas[i] * s_in, **extra_args)\n if callback is not None:\n callback(\n {\n \"x\": x,\n \"i\": i,\n \"sigma\": sigmas[i],\n \"sigma_hat\": sigmas[i],\n \"denoised\": denoised,\n }\n )\n 
t, t_next = t_fn(sigmas[i]), t_fn(sigmas[i + 1])\n h = t_next - t\n if old_denoised is None and warmup_lms:\n r = 1 / 2\n s = t + r * h\n x_2 = (sigma_fn(s) / sigma_fn(t)) * x - (-h * r).expm1() * denoised\n denoised_i = model(x_2, sigma_fn(s) * s_in, **extra_args)\n elif sigmas[i + 1] <= ddim_cutoff or old_denoised is None:\n denoised_i = denoised\n else:\n h_last = t - t_fn(sigmas[i - 1])\n r = h_last / h\n denoised_i = (1 + 1 / (2 * r)) * denoised - (1 / (2 * r)) * old_denoised\n x = (sigma_fn(t_next) / sigma_fn(t)) * x - (-h).expm1() * denoised_i\n old_denoised = denoised\n return x"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Select.froms", "code": "@property\n def froms(self):\n \"\"\"Return the displayed list of FromClause elements.\"\"\"\n\n return self._get_display_froms()", "docstring": "\"\"\"Return the displayed list of FromClause elements.\"\"\"", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/build/bw_internal/perf/sqlalchemy/sql/expression.py#L4842-L4846", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb", "code/function": "@property\n def froms(self):\n \n\n return self._get_display_froms()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Complex.__truediv__", "code": "@abstractmethod\n def __truediv__(self, other):\n \"\"\"self / other with __future__ division.\n\n Should promote to float when necessary.\n \"\"\"\n raise NotImplementedError", "docstring": "\"\"\"self / other with __future__ division.\n\n Should promote to float when necessary.\n \"\"\"", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/python/Lib/numbers.py#L123-L129", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb", "code/function": "@abstractmethod\n def __truediv__(self, other):\n \n raise NotImplementedError"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "is_namespace", "code": "def is_namespace(self):\n \"\"\"Returns true if name binding introduces new namespace.\n\n If the name is used as the target of a function or class\n statement, this will be true.\n\n Note that a single name can be bound to multiple objects. If\n is_namespace() is true, the name may also be bound to other\n objects, like an int or list, that does not introduce a new\n namespace.\n \"\"\"\n return bool(self.__namespaces)", "docstring": "\"\"\"Returns true if name binding introduces new namespace.\n\n If the name is used as the target of a function or class\n statement, this will be true.\n\n Note that a single name can be bound to multiple objects. 
If\n is_namespace() is true, the name may also be bound to other\n objects, like an int or list, that does not introduce a new\n namespace.\n \"\"\"", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/python/Lib/symtable.py#L207-L218", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb", "code/function": "def is_namespace(self):\n \n return bool(self.__namespaces)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "create_arc", "code": "def create_arc(self, *args, **kw):\n \"\"\"Create arc shaped region with coordinates x1,y1,x2,y2.\"\"\"\n return self._create('arc', args, kw)", "docstring": "\"\"\"Create arc shaped region with coordinates x1,y1,x2,y2.\"\"\"", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/python/Lib/lib-tk/Tkinter.py#L2271-L2273", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb", "code/function": "def create_arc(self, *args, **kw):\n \n return self._create('arc', args, kw)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "clearstamp", "code": "def clearstamp(self, stampid):\n \"\"\"Delete stamp with given stampid\n\n Argument:\n stampid - an integer, must be return value of previous stamp() call.\n\n Example (for a Turtle instance named turtle):\n >>> turtle.color(\"blue\")\n >>> astamp = turtle.stamp()\n >>> turtle.fd(50)\n >>> turtle.clearstamp(astamp)\n \"\"\"\n self._clearstamp(stampid)\n self._update()", "docstring": "\"\"\"Delete stamp with given stampid\n\n Argument:\n stampid - an integer, must be return value of previous stamp() call.\n\n Example (for a Turtle instance named turtle):\n >>> turtle.color(\"blue\")\n >>> astamp = turtle.stamp()\n >>> turtle.fd(50)\n >>> turtle.clearstamp(astamp)\n \"\"\"", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/python/Lib/lib-tk/turtle.py#L2933-L2946", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb", "code/function": "def clearstamp(self, stampid):\n \n self._clearstamp(stampid)\n self._update()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getEventCategory", "code": "def getEventCategory(self, record):\n \"\"\"\n Return the event category for the record.\n\n Override this if you want to specify your own categories. This version\n returns 0.\n \"\"\"\n return 0", "docstring": "\"\"\"\n Return the event category for the record.\n\n Override this if you want to specify your own categories. 
This version\n returns 0.\n \"\"\"", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/python/Lib/logging/handlers.py#L994-L1001", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb", "code/function": "def getEventCategory(self, record):\n \n return 0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "errorCheck", "code": "def errorCheck(self):\n \"\"\"Check for an error if necessary.\n\n This only generates code if the variable's mode is ErrorMode.\n \"\"\"\n if self.flags == ErrorMode:\n self.type.errorCheck(self.name)", "docstring": "\"\"\"Check for an error if necessary.\n\n This only generates code if the variable's mode is ErrorMode.\n \"\"\"", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/python/Tools/bgen/bgen/bgenVariable.py#L91-L97", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb", "code/function": "def errorCheck(self):\n \n if self.flags == ErrorMode:\n self.type.errorCheck(self.name)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_mime_type", "code": "def get_mime_type(file_path_or_bytes: str | bytes) -> str:\n \"\"\"Attempt to determine the MIME type of a file or bytes.\n\n If the input is a file path, we use the built-in `mimetypes` package to guess the MIME type.\n\n If the input is bytes, we use the `filetype` library to determine the MIME type.\n If the library cannot determine the MIME type (data missing magic bytes), we use a few heuristics to guess the type.\n\n Args:\n file_path_or_bytes: The path to the file or the bytes to check.\n\n Returns: The MIME type of the file or bytes.\n \"\"\"\n if isinstance(file_path_or_bytes, bytes):\n filetype_guess = filetype.guess(file_path_or_bytes)\n\n if filetype_guess is None:\n if _is_text(file_path_or_bytes):\n if _is_json(file_path_or_bytes):\n return \"application/json\"\n elif _is_csv(file_path_or_bytes):\n return \"text/csv\"\n return \"text/plain\"\n else:\n return \"application/octet-stream\"\n else:\n return filetype_guess.mime\n else:\n type_, _ = mimetypes.guess_type(file_path_or_bytes)\n if type_ is None:\n return \"application/octet-stream\"\n else:\n return type_", "docstring": "\"\"\"Attempt to determine the MIME type of a file or bytes.\n\n If the input is a file path, we use the built-in `mimetypes` package to guess the MIME type.\n\n If the input is bytes, we use the `filetype` library to determine the MIME type.\n If the library cannot determine the MIME type (data missing magic bytes), we use a few heuristics to guess the type.\n\n Args:\n file_path_or_bytes: The path to the file or the bytes to check.\n\n Returns: The MIME type of the file or bytes.\n \"\"\"", "url": "https://github.com/griptape-ai/griptape/blob/f9ac289715eeb24cb15cbb31e8e32c0e9fb00d45/griptape/utils/file_utils.py#L11-L43", "sha": "f9ac289715eeb24cb15cbb31e8e32c0e9fb00d45", "code/function": "def get_mime_type(file_path_or_bytes: str | bytes) -> str:\n \n if isinstance(file_path_or_bytes, bytes):\n filetype_guess = filetype.guess(file_path_or_bytes)\n\n if filetype_guess is None:\n if _is_text(file_path_or_bytes):\n if _is_json(file_path_or_bytes):\n return \"application/json\"\n elif _is_csv(file_path_or_bytes):\n return \"text/csv\"\n return \"text/plain\"\n else:\n return \"application/octet-stream\"\n else:\n return filetype_guess.mime\n else:\n type_, _ = 
mimetypes.guess_type(file_path_or_bytes)\n if type_ is None:\n return \"application/octet-stream\"\n else:\n return type_"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fetch_play_list", "code": "async def fetch_play_list(\n self,\n secUid: str,\n cursor: int,\n page_counts: int,\n ) -> UserPlayListFilter:\n \"\"\"\n 用于获取指定用户的作品合集列表\n (Used to get video mix list of specified user)\n\n Args:\n secUid: str: 用户ID (User ID)\n cursor: int: 分页游标 (Page cursor)\n page_counts: int: 分页数量 (Page counts)\n\n Return:\n playlist: UserPlayListFilter: 作品合集列表 (Video mix list)\n \"\"\"\n\n logger.debug(_(\"处理用户:{0} 的作品合集列表\").format(secUid))\n\n async with TiktokCrawler(self.kwargs) as crawler:\n params = UserPlayList(secUid=secUid, cursor=cursor, count=page_counts)\n response = await crawler.fetch_user_play_list(params)\n playlist = UserPlayListFilter(response)\n\n if not playlist.hasPlayList:\n logger.info(_(\"用户:{0} 没有作品合集\").format(secUid))\n return {}\n\n logger.debug(_(\"当前请求的cursor:{0}\").format(cursor))\n logger.debug(\n _(\"作品合集ID:{0} 作品合集标题:{1}\").format(\n playlist.mixId, playlist.mixName\n )\n )\n return playlist", "docstring": "\"\"\"\n 用于获取指定用户的作品合集列表\n (Used to get video mix list of specified user)\n\n Args:\n secUid: str: 用户ID (User ID)\n cursor: int: 分页游标 (Page cursor)\n page_counts: int: 分页数量 (Page counts)\n\n Return:\n playlist: UserPlayListFilter: 作品合集列表 (Video mix list)\n \"\"\"", "url": "https://github.com/Johnserf-Seed/f2/blob/c80eeabf0622b34549e3316f4f711c3f01109bc1/f2/apps/tiktok/handler.py#L688-L724", "sha": "c80eeabf0622b34549e3316f4f711c3f01109bc1", "code/function": "async def fetch_play_list(\n self,\n secUid: str,\n cursor: int,\n page_counts: int,\n ) -> UserPlayListFilter:\n \n\n logger.debug(_(\"处理用户:{0} 的作品合集列表\").format(secUid))\n\n async with TiktokCrawler(self.kwargs) as crawler:\n params = UserPlayList(secUid=secUid, cursor=cursor, count=page_counts)\n response = await crawler.fetch_user_play_list(params)\n playlist = UserPlayListFilter(response)\n\n if not playlist.hasPlayList:\n logger.info(_(\"用户:{0} 没有作品合集\").format(secUid))\n return {}\n\n logger.debug(_(\"当前请求的cursor:{0}\").format(cursor))\n logger.debug(\n _(\"作品合集ID:{0} 作品合集标题:{1}\").format(\n playlist.mixId, playlist.mixName\n )\n )\n return playlist"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "bilinear_interpolate_torch", "code": "def bilinear_interpolate_torch(im, x, y):\n \"\"\"\n Args:\n im: (H, W, C) [y, x]\n x: (N)\n y: (N)\n\n Returns:\n\n \"\"\"\n x0 = torch.floor(x).long()\n x1 = x0 + 1\n\n y0 = torch.floor(y).long()\n y1 = y0 + 1\n\n x0 = torch.clamp(x0, 0, im.shape[1] - 1)\n x1 = torch.clamp(x1, 0, im.shape[1] - 1)\n y0 = torch.clamp(y0, 0, im.shape[0] - 1)\n y1 = torch.clamp(y1, 0, im.shape[0] - 1)\n\n Ia = im[y0, x0]\n Ib = im[y1, x0]\n Ic = im[y0, x1]\n Id = im[y1, x1]\n\n wa = (x1.type_as(x) - x) * (y1.type_as(y) - y)\n wb = (x1.type_as(x) - x) * (y - y0.type_as(y))\n wc = (x - x0.type_as(x)) * (y1.type_as(y) - y)\n wd = (x - x0.type_as(x)) * (y - y0.type_as(y))\n ans = torch.t((torch.t(Ia) * wa)) + torch.t(torch.t(Ib) * wb) + torch.t(torch.t(Ic) * wc) + torch.t(torch.t(Id) * wd)\n return ans", "docstring": "\"\"\"\n Args:\n im: (H, W, C) [y, x]\n x: (N)\n y: (N)\n\n Returns:\n\n \"\"\"", "url": "https://github.com/Haiyang-W/DSVT/blob/8cfc2a6f23eed0b10aabcdc4768c60b184357061/pcdet/models/backbones_3d/pfe/voxel_set_abstraction.py#L11-L42", "sha": "8cfc2a6f23eed0b10aabcdc4768c60b184357061", "code/function": "def 
bilinear_interpolate_torch(im, x, y):\n \n x0 = torch.floor(x).long()\n x1 = x0 + 1\n\n y0 = torch.floor(y).long()\n y1 = y0 + 1\n\n x0 = torch.clamp(x0, 0, im.shape[1] - 1)\n x1 = torch.clamp(x1, 0, im.shape[1] - 1)\n y0 = torch.clamp(y0, 0, im.shape[0] - 1)\n y1 = torch.clamp(y1, 0, im.shape[0] - 1)\n\n Ia = im[y0, x0]\n Ib = im[y1, x0]\n Ic = im[y0, x1]\n Id = im[y1, x1]\n\n wa = (x1.type_as(x) - x) * (y1.type_as(y) - y)\n wb = (x1.type_as(x) - x) * (y - y0.type_as(y))\n wc = (x - x0.type_as(x)) * (y1.type_as(y) - y)\n wd = (x - x0.type_as(x)) * (y - y0.type_as(y))\n ans = torch.t((torch.t(Ia) * wa)) + torch.t(torch.t(Ib) * wb) + torch.t(torch.t(Ic) * wc) + torch.t(torch.t(Id) * wd)\n return ans"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "merge_train_core", "code": "def merge_train_core(\n anno_2d_file,\n avg_anno_3d_file,\n idxs_file,\n img_id,\n ann_id,\n images,\n annotations,\n):\n \"\"\" Merge training annotations of different objects\"\"\"\n\n with open(anno_2d_file, \"r\") as f:\n annos_2d = json.load(f)\n\n for anno_2d in annos_2d:\n img_id += 1\n info = {\n \"id\": img_id,\n \"img_file\": anno_2d[\"img_file\"],\n }\n images.append(info)\n\n ann_id += 1\n anno = {\n \"image_id\": img_id,\n \"id\": ann_id,\n \"pose_file\": anno_2d[\"pose_file\"],\n \"anno2d_file\": anno_2d[\"anno_file\"],\n \"avg_anno3d_file\": avg_anno_3d_file,\n \"idxs_file\": idxs_file,\n }\n annotations.append(anno)\n\n return img_id, ann_id", "docstring": "\"\"\" Merge training annotations of different objects\"\"\"", "url": "https://github.com/zju3dv/OnePose_Plus_Plus/blob/fc660efb1f594468642d681e35e4843928f16f3e/merge.py#L13-L46", "sha": "fc660efb1f594468642d681e35e4843928f16f3e", "code/function": "def merge_train_core(\n anno_2d_file,\n avg_anno_3d_file,\n idxs_file,\n img_id,\n ann_id,\n images,\n annotations,\n):\n \n\n with open(anno_2d_file, \"r\") as f:\n annos_2d = json.load(f)\n\n for anno_2d in annos_2d:\n img_id += 1\n info = {\n \"id\": img_id,\n \"img_file\": anno_2d[\"img_file\"],\n }\n images.append(info)\n\n ann_id += 1\n anno = {\n \"image_id\": img_id,\n \"id\": ann_id,\n \"pose_file\": anno_2d[\"pose_file\"],\n \"anno2d_file\": anno_2d[\"anno_file\"],\n \"avg_anno3d_file\": avg_anno_3d_file,\n \"idxs_file\": idxs_file,\n }\n annotations.append(anno)\n\n return img_id, ann_id"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_named_beta_schedule", "code": "def get_named_beta_schedule(schedule_name, num_diffusion_timesteps):\n \"\"\"\n Get a pre-defined beta schedule for the given name.\n\n The beta schedule library consists of beta schedules which remain similar\n in the limit of num_diffusion_timesteps.\n Beta schedules may be added, but should not be removed or changed once\n they are committed to maintain backwards compatibility.\n \"\"\"\n #if schedule_name == \"linear\":\n # Linear schedule from Ho et al, extended to work for any number of\n # diffusion steps.\n # scale = 1000 / num_diffusion_timesteps\n # beta_start = scale * 0.0001\n # beta_end = scale * 0.02\n\n # return np.linspace(\n # beta_start, beta_end, num_diffusion_timesteps, dtype=np.float64\n # )\n if schedule_name == \"linear\":\n linear_start = 0.00085\n linear_end = 0.012\n return np.linspace(linear_start ** 0.5, linear_end ** 0.5, num_diffusion_timesteps, dtype=np.float64) ** 2\n elif schedule_name == \"cosine\":\n return betas_for_alpha_bar(\n num_diffusion_timesteps,\n lambda t: math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2,\n )\n 
else:\n raise NotImplementedError(f\"unknown beta schedule: {schedule_name}\")", "docstring": "\"\"\"\n Get a pre-defined beta schedule for the given name.\n\n The beta schedule library consists of beta schedules which remain similar\n in the limit of num_diffusion_timesteps.\n Beta schedules may be added, but should not be removed or changed once\n they are committed to maintain backwards compatibility.\n \"\"\"", "url": "https://github.com/haha-lisa/RDM-Region-Aware-Diffusion-Model/blob/be0c767f05af74021530962218106c9c20cad3f0/guided_diffusion/gaussian_diffusion.py#L20-L49", "sha": "be0c767f05af74021530962218106c9c20cad3f0", "code/function": "def get_named_beta_schedule(schedule_name, num_diffusion_timesteps):\n \n #if schedule_name == \"linear\":\n # Linear schedule from Ho et al, extended to work for any number of\n # diffusion steps.\n # scale = 1000 / num_diffusion_timesteps\n # beta_start = scale * 0.0001\n # beta_end = scale * 0.02\n\n # return np.linspace(\n # beta_start, beta_end, num_diffusion_timesteps, dtype=np.float64\n # )\n if schedule_name == \"linear\":\n linear_start = 0.00085\n linear_end = 0.012\n return np.linspace(linear_start ** 0.5, linear_end ** 0.5, num_diffusion_timesteps, dtype=np.float64) ** 2\n elif schedule_name == \"cosine\":\n return betas_for_alpha_bar(\n num_diffusion_timesteps,\n lambda t: math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2,\n )\n else:\n raise NotImplementedError(f\"unknown beta schedule: {schedule_name}\")"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CloudflareSolver._parse_proxy", "code": "@staticmethod\n def _parse_proxy(proxy: str) -> Dict[str, str]:\n \"\"\"\n Parse a proxy URL string into a dictionary of proxy parameters for the Playwright browser.\n\n Parameters\n ----------\n proxy : str\n Proxy URL string.\n\n Returns\n -------\n Dict[str, str]\n Dictionary of proxy parameters.\n \"\"\"\n if \"@\" in proxy:\n proxy_regex = re.match(\"(.+)://(.+):(.+)@(.+)\", proxy)\n server = f\"{proxy_regex.group(1)}://{proxy_regex.group(4)}\"\n\n proxy_params = {\n \"server\": server,\n \"username\": proxy_regex.group(2),\n \"password\": proxy_regex.group(3),\n }\n else:\n proxy_params = {\"server\": proxy}\n\n return proxy_params", "docstring": "\"\"\"\n Parse a proxy URL string into a dictionary of proxy parameters for the Playwright browser.\n\n Parameters\n ----------\n proxy : str\n Proxy URL string.\n\n Returns\n -------\n Dict[str, str]\n Dictionary of proxy parameters.\n \"\"\"", "url": "https://github.com/MortezaBashsiz/CFScanner/blob/bcb96dd437e9cf90350ee5581df613b92380827f/other/gist/cfchallenger.py#L96-L123", "sha": "bcb96dd437e9cf90350ee5581df613b92380827f", "code/function": "@staticmethod\n def _parse_proxy(proxy: str) -> Dict[str, str]:\n \n if \"@\" in proxy:\n proxy_regex = re.match(\"(.+)://(.+):(.+)@(.+)\", proxy)\n server = f\"{proxy_regex.group(1)}://{proxy_regex.group(4)}\"\n\n proxy_params = {\n \"server\": server,\n \"username\": proxy_regex.group(2),\n \"password\": proxy_regex.group(3),\n }\n else:\n proxy_params = {\"server\": proxy}\n\n return proxy_params"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "test_correct_or", "code": "def test_correct_or(self) -> None:\n \"\"\"Test correct OR operator.\"\"\"\n expected = event_set(\n timestamps=[1, 2, 3, 4],\n features={\"x\": [True, False, True, True]},\n same_sampling_as=self.evset_1,\n )\n assertOperatorResult(self, self.evset_1 | self.evset_2, expected)", "docstring": "\"\"\"Test correct OR 
operator.\"\"\"", "url": "https://github.com/google/temporian/blob/1e33b75b9fadfaf1c30bc20725cba9753105e4f9/temporian/core/operators/test/test_logical.py#L43-L50", "sha": "1e33b75b9fadfaf1c30bc20725cba9753105e4f9", "code/function": "def test_correct_or(self) -> None:\n \n expected = event_set(\n timestamps=[1, 2, 3, 4],\n features={\"x\": [True, False, True, True]},\n same_sampling_as=self.evset_1,\n )\n assertOperatorResult(self, self.evset_1 | self.evset_2, expected)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PlotterBackend.finalize_subplot", "code": "@abstractmethod\n def finalize_subplot(\n self,\n ):\n \"\"\"Finalizes a previously added sub plot.\"\"\"\n\n raise NotImplementedError", "docstring": "\"\"\"Finalizes a previously added sub plot.\"\"\"", "url": "https://github.com/google/temporian/blob/1e33b75b9fadfaf1c30bc20725cba9753105e4f9/temporian/implementation/numpy/data/plotter_base.py#L77-L83", "sha": "1e33b75b9fadfaf1c30bc20725cba9753105e4f9", "code/function": "@abstractmethod\n def finalize_subplot(\n self,\n ):\n \n\n raise NotImplementedError"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Pack", "code": "def Pack(self, msg, type_url_prefix='type.googleapis.com/',\n deterministic=None):\n \"\"\"Packs the specified message into current Any message.\"\"\"\n if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/':\n self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)\n else:\n self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)\n self.value = msg.SerializeToString(deterministic=deterministic)", "docstring": "\"\"\"Packs the specified message into current Any message.\"\"\"", "url": "https://github.com/BishopFox/cloudfoxable/blob/b7f028ebf2c9d9489e19736942b99fb07b0d0251/aws/challenges/Variable/data/lambda-src-backup/protobuf/internal/well_known_types.py#L64-L71", "sha": "b7f028ebf2c9d9489e19736942b99fb07b0d0251", "code/function": "def Pack(self, msg, type_url_prefix='type.googleapis.com/',\n deterministic=None):\n \n if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/':\n self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)\n else:\n self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)\n self.value = msg.SerializeToString(deterministic=deterministic)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_path_get", "code": "def _path_get(self, data, path):\n \"\"\"Return the nested data at the given path.\n\n For instance:\n data = {'foo': ['bar', 'baz']}\n path = ['foo', 0]\n ==> 'bar'\n \"\"\"\n # jmespath isn't used here because it would be difficult to actually\n # create the jmespath query when taking all of the unknowns of key\n # structure into account. Gross though this is, it is simple and not\n # very error prone.\n d = data\n for step in path:\n d = d[step]\n return d", "docstring": "\"\"\"Return the nested data at the given path.\n\n For instance:\n data = {'foo': ['bar', 'baz']}\n path = ['foo', 0]\n ==> 'bar'\n \"\"\"", "url": "https://github.com/BishopFox/cloudfoxable/blob/b7f028ebf2c9d9489e19736942b99fb07b0d0251/aws/challenges/Variable/data/lambda-src/botocore/paginate.py#L143-L158", "sha": "b7f028ebf2c9d9489e19736942b99fb07b0d0251", "code/function": "def _path_get(self, data, path):\n \n # jmespath isn't used here because it would be difficult to actually\n # create the jmespath query when taking all of the unknowns of key\n # structure into account. 
Gross though this is, it is simple and not\n # very error prone.\n d = data\n for step in path:\n d = d[step]\n return d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "create_client", "code": "def create_client(\n self,\n service_name,\n region_name=None,\n api_version=None,\n use_ssl=True,\n verify=None,\n endpoint_url=None,\n aws_access_key_id=None,\n aws_secret_access_key=None,\n aws_session_token=None,\n config=None,\n ):\n \"\"\"Create a botocore client.\n\n :type service_name: string\n :param service_name: The name of the service for which a client will\n be created. You can use the ``Session.get_available_services()``\n method to get a list of all available service names.\n\n :type region_name: string\n :param region_name: The name of the region associated with the client.\n A client is associated with a single region.\n\n :type api_version: string\n :param api_version: The API version to use. By default, botocore will\n use the latest API version when creating a client. You only need\n to specify this parameter if you want to use a previous API version\n of the client.\n\n :type use_ssl: boolean\n :param use_ssl: Whether or not to use SSL. By default, SSL is used.\n Note that not all services support non-ssl connections.\n\n :type verify: boolean/string\n :param verify: Whether or not to verify SSL certificates.\n By default SSL certificates are verified. You can provide the\n following values:\n\n * False - do not validate SSL certificates. SSL will still be\n used (unless use_ssl is False), but SSL certificates\n will not be verified.\n * path/to/cert/bundle.pem - A filename of the CA cert bundle to\n uses. You can specify this argument if you want to use a\n different CA cert bundle than the one used by botocore.\n\n :type endpoint_url: string\n :param endpoint_url: The complete URL to use for the constructed\n client. Normally, botocore will automatically construct the\n appropriate URL to use when communicating with a service. You can\n specify a complete URL (including the \"http/https\" scheme) to\n override this behavior. If this value is provided, then\n ``use_ssl`` is ignored.\n\n :type aws_access_key_id: string\n :param aws_access_key_id: The access key to use when creating\n the client. This is entirely optional, and if not provided,\n the credentials configured for the session will automatically\n be used. You only need to provide this argument if you want\n to override the credentials used for this specific client.\n\n :type aws_secret_access_key: string\n :param aws_secret_access_key: The secret key to use when creating\n the client. Same semantics as aws_access_key_id above.\n\n :type aws_session_token: string\n :param aws_session_token: The session token to use when creating\n the client. Same semantics as aws_access_key_id above.\n\n :type config: botocore.client.Config\n :param config: Advanced client configuration options. If a value\n is specified in the client config, its value will take precedence\n over environment variables and configuration values, but not over\n a value passed explicitly to the method. 
If a default config\n object is set on the session, the config object used when creating\n the client will be the result of calling ``merge()`` on the\n default config with the config provided to this call.\n\n :rtype: botocore.client.BaseClient\n :return: A botocore client instance\n\n \"\"\"\n default_client_config = self.get_default_client_config()\n # If a config is provided and a default config is set, then\n # use the config resulting from merging the two.\n if config is not None and default_client_config is not None:\n config = default_client_config.merge(config)\n # If a config was not provided then use the default\n # client config from the session\n elif default_client_config is not None:\n config = default_client_config\n\n region_name = self._resolve_region_name(region_name, config)\n\n # Figure out the verify value base on the various\n # configuration options.\n if verify is None:\n verify = self.get_config_variable('ca_bundle')\n\n if api_version is None:\n api_version = self.get_config_variable('api_versions').get(\n service_name, None\n )\n\n loader = self.get_component('data_loader')\n event_emitter = self.get_component('event_emitter')\n response_parser_factory = self.get_component('response_parser_factory')\n if config is not None and config.signature_version is UNSIGNED:\n credentials = None\n elif (\n aws_access_key_id is not None and aws_secret_access_key is not None\n ):\n credentials = botocore.credentials.Credentials(\n access_key=aws_access_key_id,\n secret_key=aws_secret_access_key,\n token=aws_session_token,\n )\n elif self._missing_cred_vars(aws_access_key_id, aws_secret_access_key):\n raise PartialCredentialsError(\n provider='explicit',\n cred_var=self._missing_cred_vars(\n aws_access_key_id, aws_secret_access_key\n ),\n )\n else:\n credentials = self.get_credentials()\n auth_token = self.get_auth_token()\n endpoint_resolver = self._get_internal_component('endpoint_resolver')\n exceptions_factory = self._get_internal_component('exceptions_factory')\n config_store = self.get_component('config_store')\n defaults_mode = self._resolve_defaults_mode(config, config_store)\n if defaults_mode != 'legacy':\n smart_defaults_factory = self._get_internal_component(\n 'smart_defaults_factory'\n )\n config_store = copy.deepcopy(config_store)\n smart_defaults_factory.merge_smart_defaults(\n config_store, defaults_mode, region_name\n )\n client_creator = botocore.client.ClientCreator(\n loader,\n endpoint_resolver,\n self.user_agent(),\n event_emitter,\n retryhandler,\n translate,\n response_parser_factory,\n exceptions_factory,\n config_store,\n )\n client = client_creator.create_client(\n service_name=service_name,\n region_name=region_name,\n is_secure=use_ssl,\n endpoint_url=endpoint_url,\n verify=verify,\n credentials=credentials,\n scoped_config=self.get_scoped_config(),\n client_config=config,\n api_version=api_version,\n auth_token=auth_token,\n )\n monitor = self._get_internal_component('monitor')\n if monitor is not None:\n monitor.register(client.meta.events)\n return client", "docstring": "\"\"\"Create a botocore client.\n\n :type service_name: string\n :param service_name: The name of the service for which a client will\n be created. You can use the ``Session.get_available_services()``\n method to get a list of all available service names.\n\n :type region_name: string\n :param region_name: The name of the region associated with the client.\n A client is associated with a single region.\n\n :type api_version: string\n :param api_version: The API version to use. 
By default, botocore will\n use the latest API version when creating a client. You only need\n to specify this parameter if you want to use a previous API version\n of the client.\n\n :type use_ssl: boolean\n :param use_ssl: Whether or not to use SSL. By default, SSL is used.\n Note that not all services support non-ssl connections.\n\n :type verify: boolean/string\n :param verify: Whether or not to verify SSL certificates.\n By default SSL certificates are verified. You can provide the\n following values:\n\n * False - do not validate SSL certificates. SSL will still be\n used (unless use_ssl is False), but SSL certificates\n will not be verified.\n * path/to/cert/bundle.pem - A filename of the CA cert bundle to\n uses. You can specify this argument if you want to use a\n different CA cert bundle than the one used by botocore.\n\n :type endpoint_url: string\n :param endpoint_url: The complete URL to use for the constructed\n client. Normally, botocore will automatically construct the\n appropriate URL to use when communicating with a service. You can\n specify a complete URL (including the \"http/https\" scheme) to\n override this behavior. If this value is provided, then\n ``use_ssl`` is ignored.\n\n :type aws_access_key_id: string\n :param aws_access_key_id: The access key to use when creating\n the client. This is entirely optional, and if not provided,\n the credentials configured for the session will automatically\n be used. You only need to provide this argument if you want\n to override the credentials used for this specific client.\n\n :type aws_secret_access_key: string\n :param aws_secret_access_key: The secret key to use when creating\n the client. Same semantics as aws_access_key_id above.\n\n :type aws_session_token: string\n :param aws_session_token: The session token to use when creating\n the client. Same semantics as aws_access_key_id above.\n\n :type config: botocore.client.Config\n :param config: Advanced client configuration options. If a value\n is specified in the client config, its value will take precedence\n over environment variables and configuration values, but not over\n a value passed explicitly to the method. 
If a default config\n object is set on the session, the config object used when creating\n the client will be the result of calling ``merge()`` on the\n default config with the config provided to this call.\n\n :rtype: botocore.client.BaseClient\n :return: A botocore client instance\n\n \"\"\"", "url": "https://github.com/BishopFox/cloudfoxable/blob/b7f028ebf2c9d9489e19736942b99fb07b0d0251/aws/challenges/Variable/data/lambda-src/botocore/session.py#L826-L991", "sha": "b7f028ebf2c9d9489e19736942b99fb07b0d0251", "code/function": "def create_client(\n self,\n service_name,\n region_name=None,\n api_version=None,\n use_ssl=True,\n verify=None,\n endpoint_url=None,\n aws_access_key_id=None,\n aws_secret_access_key=None,\n aws_session_token=None,\n config=None,\n ):\n \n default_client_config = self.get_default_client_config()\n # If a config is provided and a default config is set, then\n # use the config resulting from merging the two.\n if config is not None and default_client_config is not None:\n config = default_client_config.merge(config)\n # If a config was not provided then use the default\n # client config from the session\n elif default_client_config is not None:\n config = default_client_config\n\n region_name = self._resolve_region_name(region_name, config)\n\n # Figure out the verify value base on the various\n # configuration options.\n if verify is None:\n verify = self.get_config_variable('ca_bundle')\n\n if api_version is None:\n api_version = self.get_config_variable('api_versions').get(\n service_name, None\n )\n\n loader = self.get_component('data_loader')\n event_emitter = self.get_component('event_emitter')\n response_parser_factory = self.get_component('response_parser_factory')\n if config is not None and config.signature_version is UNSIGNED:\n credentials = None\n elif (\n aws_access_key_id is not None and aws_secret_access_key is not None\n ):\n credentials = botocore.credentials.Credentials(\n access_key=aws_access_key_id,\n secret_key=aws_secret_access_key,\n token=aws_session_token,\n )\n elif self._missing_cred_vars(aws_access_key_id, aws_secret_access_key):\n raise PartialCredentialsError(\n provider='explicit',\n cred_var=self._missing_cred_vars(\n aws_access_key_id, aws_secret_access_key\n ),\n )\n else:\n credentials = self.get_credentials()\n auth_token = self.get_auth_token()\n endpoint_resolver = self._get_internal_component('endpoint_resolver')\n exceptions_factory = self._get_internal_component('exceptions_factory')\n config_store = self.get_component('config_store')\n defaults_mode = self._resolve_defaults_mode(config, config_store)\n if defaults_mode != 'legacy':\n smart_defaults_factory = self._get_internal_component(\n 'smart_defaults_factory'\n )\n config_store = copy.deepcopy(config_store)\n smart_defaults_factory.merge_smart_defaults(\n config_store, defaults_mode, region_name\n )\n client_creator = botocore.client.ClientCreator(\n loader,\n endpoint_resolver,\n self.user_agent(),\n event_emitter,\n retryhandler,\n translate,\n response_parser_factory,\n exceptions_factory,\n config_store,\n )\n client = client_creator.create_client(\n service_name=service_name,\n region_name=region_name,\n is_secure=use_ssl,\n endpoint_url=endpoint_url,\n verify=verify,\n credentials=credentials,\n scoped_config=self.get_scoped_config(),\n client_config=config,\n api_version=api_version,\n auth_token=auth_token,\n )\n monitor = self._get_internal_component('monitor')\n if monitor is not None:\n monitor.register(client.meta.events)\n return client"} +{"repo_name": 
"", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__init__", "code": "def __init__(self, num_classes, in_channels=3, depth=5, \n start_filts=64, up_mode='transpose', same_channels=False,\n merge_mode='concat', **kwargs):\n \"\"\"\n Arguments:\n in_channels: int, number of channels in the input tensor.\n Default is 3 for RGB images.\n depth: int, number of MaxPools in the U-Net.\n start_filts: int, number of convolutional filters for the \n first conv.\n up_mode: string, type of upconvolution. Choices: 'transpose'\n for transpose convolution or 'upsample' for nearest neighbour\n upsampling.\n \"\"\"\n super(UNet, self).__init__()\n\n if up_mode in ('transpose', 'upsample'):\n self.up_mode = up_mode\n else:\n raise ValueError(\"\\\"{}\\\" is not a valid mode for \"\n \"upsampling. Only \\\"transpose\\\" and \"\n \"\\\"upsample\\\" are allowed.\".format(up_mode))\n \n if merge_mode in ('concat', 'add'):\n self.merge_mode = merge_mode\n else:\n raise ValueError(\"\\\"{}\\\" is not a valid mode for\"\n \"merging up and down paths. \"\n \"Only \\\"concat\\\" and \"\n \"\\\"add\\\" are allowed.\".format(up_mode))\n\n # NOTE: up_mode 'upsample' is incompatible with merge_mode 'add'\n if self.up_mode == 'upsample' and self.merge_mode == 'add':\n raise ValueError(\"up_mode \\\"upsample\\\" is incompatible \"\n \"with merge_mode \\\"add\\\" at the moment \"\n \"because it doesn't make sense to use \"\n \"nearest neighbour to reduce \"\n \"depth channels (by half).\")\n\n self.num_classes = num_classes\n self.in_channels = in_channels\n self.start_filts = start_filts\n self.depth = depth\n\n self.down_convs = []\n self.up_convs = []\n\n # create the encoder pathway and add to a list\n for i in range(depth):\n ins = self.in_channels if i == 0 else outs\n outs = self.start_filts*(2**i) if not same_channels else self.in_channels\n pooling = True if i < depth-1 else False\n #print(\"down ins, outs: \", ins, outs) # [latent dim, 32], [32, 64]...[128, 256]\n\n down_conv = DownConv(ins, outs, pooling=pooling)\n self.down_convs.append(down_conv)\n\n # create the decoder pathway and add to a list\n # - careful! decoding only requires depth-1 blocks\n for i in range(depth-1):\n ins = outs\n outs = ins // 2 if not same_channels else ins \n up_conv = UpConv(ins, outs, up_mode=up_mode,\n merge_mode=merge_mode)\n self.up_convs.append(up_conv)\n #print(\"up ins, outs: \", ins, outs)# [256, 128]...[64, 32]; final 32 to latent is done through self.conv_final \n\n # add the list of modules to current module\n self.down_convs = nn.ModuleList(self.down_convs)\n self.up_convs = nn.ModuleList(self.up_convs)\n\n self.conv_final = conv1x1(outs, self.num_classes)\n\n self.reset_params()", "docstring": "\"\"\"\n Arguments:\n in_channels: int, number of channels in the input tensor.\n Default is 3 for RGB images.\n depth: int, number of MaxPools in the U-Net.\n start_filts: int, number of convolutional filters for the \n first conv.\n up_mode: string, type of upconvolution. 
Choices: 'transpose'\n for transpose convolution or 'upsample' for nearest neighbour\n upsampling.\n \"\"\"", "url": "https://github.com/princeton-computational-imaging/Diffusion-SDF/blob/7eb2d6786b8864dc45cd180be8ad5c5b2f8c1f8f/models/archs/encoders/conv_pointnet.py#L389-L462", "sha": "7eb2d6786b8864dc45cd180be8ad5c5b2f8c1f8f", "code/function": "def __init__(self, num_classes, in_channels=3, depth=5, \n start_filts=64, up_mode='transpose', same_channels=False,\n merge_mode='concat', **kwargs):\n \n super(UNet, self).__init__()\n\n if up_mode in ('transpose', 'upsample'):\n self.up_mode = up_mode\n else:\n raise ValueError(\"\\\"{}\\\" is not a valid mode for \"\n \"upsampling. Only \\\"transpose\\\" and \"\n \"\\\"upsample\\\" are allowed.\".format(up_mode))\n \n if merge_mode in ('concat', 'add'):\n self.merge_mode = merge_mode\n else:\n raise ValueError(\"\\\"{}\\\" is not a valid mode for\"\n \"merging up and down paths. \"\n \"Only \\\"concat\\\" and \"\n \"\\\"add\\\" are allowed.\".format(up_mode))\n\n # NOTE: up_mode 'upsample' is incompatible with merge_mode 'add'\n if self.up_mode == 'upsample' and self.merge_mode == 'add':\n raise ValueError(\"up_mode \\\"upsample\\\" is incompatible \"\n \"with merge_mode \\\"add\\\" at the moment \"\n \"because it doesn't make sense to use \"\n \"nearest neighbour to reduce \"\n \"depth channels (by half).\")\n\n self.num_classes = num_classes\n self.in_channels = in_channels\n self.start_filts = start_filts\n self.depth = depth\n\n self.down_convs = []\n self.up_convs = []\n\n # create the encoder pathway and add to a list\n for i in range(depth):\n ins = self.in_channels if i == 0 else outs\n outs = self.start_filts*(2**i) if not same_channels else self.in_channels\n pooling = True if i < depth-1 else False\n #print(\"down ins, outs: \", ins, outs) # [latent dim, 32], [32, 64]...[128, 256]\n\n down_conv = DownConv(ins, outs, pooling=pooling)\n self.down_convs.append(down_conv)\n\n # create the decoder pathway and add to a list\n # - careful! decoding only requires depth-1 blocks\n for i in range(depth-1):\n ins = outs\n outs = ins // 2 if not same_channels else ins \n up_conv = UpConv(ins, outs, up_mode=up_mode,\n merge_mode=merge_mode)\n self.up_convs.append(up_conv)\n #print(\"up ins, outs: \", ins, outs)# [256, 128]...[64, 32]; final 32 to latent is done through self.conv_final \n\n # add the list of modules to current module\n self.down_convs = nn.ModuleList(self.down_convs)\n self.up_convs = nn.ModuleList(self.up_convs)\n\n self.conv_final = conv1x1(outs, self.num_classes)\n\n self.reset_params()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "marginal_lambda", "code": "def marginal_lambda(self, t):\n \"\"\"\n Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T].\n \"\"\"\n log_mean_coeff = self.marginal_log_mean_coeff(t)\n log_std = 0.5 * torch.log(1. - torch.exp(2. * log_mean_coeff))\n return log_mean_coeff - log_std", "docstring": "\"\"\"\n Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T].\n \"\"\"", "url": "https://github.com/wl-zhao/UniPC/blob/cf9de85bf2ed68137e6fba5f165f451677b174c4/example/score_sde_pytorch/uni_pc.py#L128-L134", "sha": "cf9de85bf2ed68137e6fba5f165f451677b174c4", "code/function": "def marginal_lambda(self, t):\n \n log_mean_coeff = self.marginal_log_mean_coeff(t)\n log_std = 0.5 * torch.log(1. - torch.exp(2. 
* log_mean_coeff))\n return log_mean_coeff - log_std"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "build_fno", "code": "def build_fno(self, num_fno_modes: List[int]) -> None:\n \"\"\"construct FNO block.\n Parameters\n ----------\n num_fno_modes : List[int]\n Number of Fourier modes kept in spectral convolutions\n\n \"\"\"\n # Build Neural Fourier Operators\n self.spconv_layers = nn.ModuleList()\n self.conv_layers = nn.ModuleList()\n for _ in range(self.num_fno_layers):\n self.spconv_layers.append(\n layers.SpectralConv3d(\n self.fno_width,\n self.fno_width,\n num_fno_modes[0],\n num_fno_modes[1],\n num_fno_modes[2],\n )\n )\n self.conv_layers.append(nn.Conv3d(self.fno_width, self.fno_width, 1))", "docstring": "\"\"\"construct FNO block.\n Parameters\n ----------\n num_fno_modes : List[int]\n Number of Fourier modes kept in spectral convolutions\n\n \"\"\"", "url": "https://github.com/NVIDIA/modulus/blob/e6d7b02fb19ab9cdb3138de228ca3d6f0c99e7d1/modulus/models/fno/fno.py#L463-L484", "sha": "e6d7b02fb19ab9cdb3138de228ca3d6f0c99e7d1", "code/function": "def build_fno(self, num_fno_modes: List[int]) -> None:\n \n # Build Neural Fourier Operators\n self.spconv_layers = nn.ModuleList()\n self.conv_layers = nn.ModuleList()\n for _ in range(self.num_fno_layers):\n self.spconv_layers.append(\n layers.SpectralConv3d(\n self.fno_width,\n self.fno_width,\n num_fno_modes[0],\n num_fno_modes[1],\n num_fno_modes[2],\n )\n )\n self.conv_layers.append(nn.Conv3d(self.fno_width, self.fno_width, 1))"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "is_ignored", "code": "def is_ignored(path, working_path, ignore_patterns):\n \"\"\"\n Check if the path needs to be ignored\n \"\"\"\n # Get the git root path to stop the search\n git_root_path = Path(__file__) / Path(working_path)\n git_root_path = git_root_path.resolve()\n\n for pattern in ignore_patterns:\n normalized_pattern = pattern.rstrip(\"/\")\n\n # Filter paths that are outside git root\n relevant_children = [\n part\n for part in [path] + list(path.parents)\n if git_root_path in part.parents or part == git_root_path\n ]\n\n # Check the directory itself and each parent directory\n for part in relevant_children:\n # Match directories (patterns ending with '/')\n if pattern.endswith(\"/\") and part.is_dir():\n if fnmatch.fnmatch(part.name, normalized_pattern):\n return True\n\n # Match files or directories without a trailing '/'\n if not pattern.endswith(\"/\") and (\n fnmatch.fnmatch(str(part), pattern)\n or fnmatch.fnmatch(part.name, normalized_pattern)\n ):\n return True\n\n return False", "docstring": "\"\"\"\n Check if the path needs to be ignored\n \"\"\"", "url": "https://github.com/NVIDIA/modulus/blob/e6d7b02fb19ab9cdb3138de228ca3d6f0c99e7d1/test/ci_tests/header_check.py#L42-L74", "sha": "e6d7b02fb19ab9cdb3138de228ca3d6f0c99e7d1", "code/function": "def is_ignored(path, working_path, ignore_patterns):\n \n # Get the git root path to stop the search\n git_root_path = Path(__file__) / Path(working_path)\n git_root_path = git_root_path.resolve()\n\n for pattern in ignore_patterns:\n normalized_pattern = pattern.rstrip(\"/\")\n\n # Filter paths that are outside git root\n relevant_children = [\n part\n for part in [path] + list(path.parents)\n if git_root_path in part.parents or part == git_root_path\n ]\n\n # Check the directory itself and each parent directory\n for part in relevant_children:\n # Match directories (patterns ending with '/')\n if pattern.endswith(\"/\") and 
part.is_dir():\n if fnmatch.fnmatch(part.name, normalized_pattern):\n return True\n\n # Match files or directories without a trailing '/'\n if not pattern.endswith(\"/\") and (\n fnmatch.fnmatch(str(part), pattern)\n or fnmatch.fnmatch(part.name, normalized_pattern)\n ):\n return True\n\n return False"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "test_irfft_ort_op", "code": "@check_ort_version()\n@pytest.mark.parametrize(\"dft_dim\", [-1, 1])\ndef test_irfft_ort_op(\n test_data: Tensor, norm: str, dft_dim: int, rtol: float = 1e-5, atol: float = 1e-5\n):\n \"\"\"Test IRFFT onnx runtime operation is consistent with torch irfft\"\"\"\n x = test_data.transpose(-1, dft_dim)\n x = fft.rfft(x, dim=dft_dim, norm=norm)\n\n class CustomIrfft(nn.Module):\n def forward(self, y):\n return fft.irfft(y, dim=dft_dim, norm=norm)\n\n model = CustomIrfft()\n output = model(x)\n\n x0 = torch.view_as_real(x)\n onnx_model = export_to_onnx_stream(model, x0)\n output_ort = run_onnx_inference(onnx_model, (x0,))\n assert len(output_ort) == 1\n output_onnx = torch.Tensor(output_ort[0])\n\n assert torch.allclose(output, output_onnx, rtol, atol)", "docstring": "\"\"\"Test IRFFT onnx runtime operation is consistent with torch irfft\"\"\"", "url": "https://github.com/NVIDIA/modulus/blob/e6d7b02fb19ab9cdb3138de228ca3d6f0c99e7d1/test/deploy/test_onnx_fft.py#L168-L190", "sha": "e6d7b02fb19ab9cdb3138de228ca3d6f0c99e7d1", "code/function": "@check_ort_version()\n@pytest.mark.parametrize(\"dft_dim\", [-1, 1])\ndef test_irfft_ort_op(\n test_data: Tensor, norm: str, dft_dim: int, rtol: float = 1e-5, atol: float = 1e-5\n):\n \n x = test_data.transpose(-1, dft_dim)\n x = fft.rfft(x, dim=dft_dim, norm=norm)\n\n class CustomIrfft(nn.Module):\n def forward(self, y):\n return fft.irfft(y, dim=dft_dim, norm=norm)\n\n model = CustomIrfft()\n output = model(x)\n\n x0 = torch.view_as_real(x)\n onnx_model = export_to_onnx_stream(model, x0)\n output_ort = run_onnx_inference(onnx_model, (x0,))\n assert len(output_ort) == 1\n output_onnx = torch.Tensor(output_ort[0])\n\n assert torch.allclose(output, output_onnx, rtol, atol)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "build_sensor_observation_space", "code": "def build_sensor_observation_space(self) -> gymnasium.spaces.Dict:\n \"\"\"Build observation space for all sensor types.\n\n Returns:\n gymnasium.spaces.Dict: The observation space generated by sensors bound with agent.\n \"\"\"\n obs_space_dict = {}\n\n for sensor in self.sensor_conf.sensors: # Explicitly listed sensors\n dim = self.sensor_info.sensor_dim[sensor]\n obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (dim,), dtype=np.float64)\n # Velocities don't have wraparound effects that rotational positions do\n # Wraparounds are not kind to neural networks\n # Whereas the angle 2*pi is very close to 0, this isn't true in the network\n # In theory the network could learn this, but in practice we simplify it\n # when the sensors_angle_components switch is enabled.\n for sensor in self.sensor_info.hinge_vel_names:\n obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (1,), dtype=np.float64)\n for sensor in self.sensor_info.ballangvel_names:\n obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (3,), dtype=np.float64)\n if self.sensor_info.freejoint_pos_name:\n sensor = self.sensor_info.freejoint_pos_name\n obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (1,), dtype=np.float64)\n obs_space_dict[sensor + '1'] 
= gymnasium.spaces.Box(\n -np.inf,\n np.inf,\n (1,),\n dtype=np.float64,\n )\n if self.sensor_info.freejoint_qvel_name:\n sensor = self.sensor_info.freejoint_qvel_name\n obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (3,), dtype=np.float64)\n obs_space_dict[sensor + '1'] = gymnasium.spaces.Box(\n -np.inf,\n np.inf,\n (3,),\n dtype=np.float64,\n )\n # Angular positions have wraparound effects, so output something more friendly\n if self.sensor_conf.sensors_angle_components:\n # Single joints are turned into sin(x), cos(x) pairs\n # These should be easier to learn for neural networks,\n # Since for angles, small perturbations in angle give small differences in sin/cos\n for sensor in self.sensor_info.hinge_pos_names:\n obs_space_dict[sensor] = gymnasium.spaces.Box(\n -np.inf,\n np.inf,\n (2,),\n dtype=np.float64,\n )\n # Quaternions are turned into 3x3 rotation matrices\n # Quaternions have a wraparound issue in how they are normalized,\n # where the convention is to change the sign so the first element to be positive.\n # If the first element is close to 0, this can mean small differences in rotation\n # lead to large differences in value as the latter elements change sign.\n # This also means that the first element of the quaternion is not expectation zero.\n # The SO(3) rotation representation would be a good replacement here,\n # since it smoothly varies between values in all directions (the property we want),\n # but right now we have very little code to support SO(3) rotations.\n # Instead we use a 3x3 rotation matrix, which if normalized, smoothly varies as well.\n for sensor in self.sensor_info.ballquat_names:\n obs_space_dict[sensor] = gymnasium.spaces.Box(\n -np.inf,\n np.inf,\n (3, 3),\n dtype=np.float64,\n )\n else:\n # Otherwise include the sensor without any processing\n for sensor in self.sensor_info.hinge_pos_names:\n obs_space_dict[sensor] = gymnasium.spaces.Box(\n -np.inf,\n np.inf,\n (1,),\n dtype=np.float64,\n )\n for sensor in self.sensor_info.ballquat_names:\n obs_space_dict[sensor] = gymnasium.spaces.Box(\n -np.inf,\n np.inf,\n (4,),\n dtype=np.float64,\n )\n\n return obs_space_dict", "docstring": "\"\"\"Build observation space for all sensor types.\n\n Returns:\n gymnasium.spaces.Dict: The observation space generated by sensors bound with agent.\n \"\"\"", "url": "https://github.com/PKU-Alignment/safety-gymnasium/blob/3b117c1ee896b62cd47a527d201d1117fd36ef3d/safety_gymnasium/tasks/safe_multi_agent/bases/base_agent.py#L345-L429", "sha": "3b117c1ee896b62cd47a527d201d1117fd36ef3d", "code/function": "def build_sensor_observation_space(self) -> gymnasium.spaces.Dict:\n \n obs_space_dict = {}\n\n for sensor in self.sensor_conf.sensors: # Explicitly listed sensors\n dim = self.sensor_info.sensor_dim[sensor]\n obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (dim,), dtype=np.float64)\n # Velocities don't have wraparound effects that rotational positions do\n # Wraparounds are not kind to neural networks\n # Whereas the angle 2*pi is very close to 0, this isn't true in the network\n # In theory the network could learn this, but in practice we simplify it\n # when the sensors_angle_components switch is enabled.\n for sensor in self.sensor_info.hinge_vel_names:\n obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (1,), dtype=np.float64)\n for sensor in self.sensor_info.ballangvel_names:\n obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (3,), dtype=np.float64)\n if self.sensor_info.freejoint_pos_name:\n sensor = 
self.sensor_info.freejoint_pos_name\n obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (1,), dtype=np.float64)\n obs_space_dict[sensor + '1'] = gymnasium.spaces.Box(\n -np.inf,\n np.inf,\n (1,),\n dtype=np.float64,\n )\n if self.sensor_info.freejoint_qvel_name:\n sensor = self.sensor_info.freejoint_qvel_name\n obs_space_dict[sensor] = gymnasium.spaces.Box(-np.inf, np.inf, (3,), dtype=np.float64)\n obs_space_dict[sensor + '1'] = gymnasium.spaces.Box(\n -np.inf,\n np.inf,\n (3,),\n dtype=np.float64,\n )\n # Angular positions have wraparound effects, so output something more friendly\n if self.sensor_conf.sensors_angle_components:\n # Single joints are turned into sin(x), cos(x) pairs\n # These should be easier to learn for neural networks,\n # Since for angles, small perturbations in angle give small differences in sin/cos\n for sensor in self.sensor_info.hinge_pos_names:\n obs_space_dict[sensor] = gymnasium.spaces.Box(\n -np.inf,\n np.inf,\n (2,),\n dtype=np.float64,\n )\n # Quaternions are turned into 3x3 rotation matrices\n # Quaternions have a wraparound issue in how they are normalized,\n # where the convention is to change the sign so the first element to be positive.\n # If the first element is close to 0, this can mean small differences in rotation\n # lead to large differences in value as the latter elements change sign.\n # This also means that the first element of the quaternion is not expectation zero.\n # The SO(3) rotation representation would be a good replacement here,\n # since it smoothly varies between values in all directions (the property we want),\n # but right now we have very little code to support SO(3) rotations.\n # Instead we use a 3x3 rotation matrix, which if normalized, smoothly varies as well.\n for sensor in self.sensor_info.ballquat_names:\n obs_space_dict[sensor] = gymnasium.spaces.Box(\n -np.inf,\n np.inf,\n (3, 3),\n dtype=np.float64,\n )\n else:\n # Otherwise include the sensor without any processing\n for sensor in self.sensor_info.hinge_pos_names:\n obs_space_dict[sensor] = gymnasium.spaces.Box(\n -np.inf,\n np.inf,\n (1,),\n dtype=np.float64,\n )\n for sensor in self.sensor_info.ballquat_names:\n obs_space_dict[sensor] = gymnasium.spaces.Box(\n -np.inf,\n np.inf,\n (4,),\n dtype=np.float64,\n )\n\n return obs_space_dict"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "update_ema", "code": "def update_ema(target_params, source_params, rate=0.99):\n \"\"\"\n Update target parameters to be closer to those of source parameters using\n an exponential moving average.\n\n :param target_params: the target parameter sequence.\n :param source_params: the source parameter sequence.\n :param rate: the EMA rate (closer to 1 means slower).\n \"\"\"\n for targ, src in zip(target_params, source_params):\n targ.detach().mul_(rate).add_(src, alpha=1 - rate)", "docstring": "\"\"\"\n Update target parameters to be closer to those of source parameters using\n an exponential moving average.\n\n :param target_params: the target parameter sequence.\n :param source_params: the source parameter sequence.\n :param rate: the EMA rate (closer to 1 means slower).\n \"\"\"", "url": "https://github.com/forever208/DDPM-IP/blob/1f767192e8b60e1694c670672d5adfd8b42256f5/guided_diffusion/nn.py#L55-L65", "sha": "1f767192e8b60e1694c670672d5adfd8b42256f5", "code/function": "def update_ema(target_params, source_params, rate=0.99):\n \n for targ, src in zip(target_params, source_params):\n targ.detach().mul_(rate).add_(src, alpha=1 - rate)"} 
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "forward", "code": "def forward(self, query_feats, key_feats):\n \"\"\"Forward function.\"\"\"\n context = super(ObjectAttentionBlock,\n self).forward(query_feats, key_feats)\n output = self.bottleneck(torch.cat([context, query_feats], dim=1))\n if self.query_downsample is not None:\n output = resize(query_feats)\n\n return output", "docstring": "\"\"\"Forward function.\"\"\"", "url": "https://github.com/lllyasviel/ControlNet/blob/ed85cd1e25a5ed592f7d8178495b4483de0331bf/annotator/uniformer/mmseg/models/decode_heads/ocr_head.py#L73-L81", "sha": "ed85cd1e25a5ed592f7d8178495b4483de0331bf", "code/function": "def forward(self, query_feats, key_feats):\n \n context = super(ObjectAttentionBlock,\n self).forward(query_feats, key_feats)\n output = self.bottleneck(torch.cat([context, query_feats], dim=1))\n if self.query_downsample is not None:\n output = resize(query_feats)\n\n return output"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "load_data", "code": "def load_data(\n self,\n state: Optional[IssueState] = IssueState.OPEN,\n labelFilters: Optional[List[Tuple[str, FilterType]]] = None,\n ) -> List[Document]:\n \"\"\"\n Load issues from a repository and converts them to documents.\n\n Each issue is converted to a document by doing the following:\n\n - The text of the document is the concatenation of the title and the body of the issue.\n - The title of the document is the title of the issue.\n - The doc_id of the document is the issue number.\n - The extra_info of the document is a dictionary with the following keys:\n - state: State of the issue.\n - created_at: Date when the issue was created.\n - closed_at: Date when the issue was closed. Only present if the issue is closed.\n - url: URL of the issue.\n - assignee: Login of the user assigned to the issue. Only present if the issue is assigned.\n - The embedding of the document is None.\n - The doc_hash of the document is None.\n\n Args:\n - state (IssueState): State of the issues to retrieve. 
Default is IssueState.OPEN.\n - labelFilters: an optional list of filters to apply to the issue list based on labels.\n\n :return: list of documents\n \"\"\"\n documents = []\n page = 1\n # Loop until there are no more issues\n while True:\n issues: Dict = self._loop.run_until_complete(\n self._github_client.get_issues(\n self._owner, self._repo, state=state.value, page=page\n )\n )\n\n if len(issues) == 0:\n print_if_verbose(self._verbose, \"No more issues found, stopping\")\n\n break\n print_if_verbose(\n self._verbose, f\"Found {len(issues)} issues in the repo page {page}\"\n )\n page += 1\n filterCount = 0\n for issue in issues:\n if not self._must_include(labelFilters, issue):\n filterCount += 1\n continue\n title = issue[\"title\"]\n body = issue[\"body\"]\n document = Document(\n doc_id=str(issue[\"number\"]),\n text=f\"{title}\\n{body}\",\n )\n extra_info = {\n \"state\": issue[\"state\"],\n \"created_at\": issue[\"created_at\"],\n # url is the API URL\n \"url\": issue[\"url\"],\n # source is the HTML URL, more conveninent for humans\n \"source\": issue[\"html_url\"],\n }\n if issue[\"closed_at\"] is not None:\n extra_info[\"closed_at\"] = issue[\"closed_at\"]\n if issue[\"assignee\"] is not None:\n extra_info[\"assignee\"] = issue[\"assignee\"][\"login\"]\n if issue[\"labels\"] is not None:\n extra_info[\"labels\"] = [label[\"name\"] for label in issue[\"labels\"]]\n document.extra_info = extra_info\n documents.append(document)\n\n print_if_verbose(self._verbose, f\"Resulted in {len(documents)} documents\")\n if labelFilters is not None:\n print_if_verbose(self._verbose, f\"Filtered out {filterCount} issues\")\n\n return documents", "docstring": "\"\"\"\n Load issues from a repository and converts them to documents.\n\n Each issue is converted to a document by doing the following:\n\n - The text of the document is the concatenation of the title and the body of the issue.\n - The title of the document is the title of the issue.\n - The doc_id of the document is the issue number.\n - The extra_info of the document is a dictionary with the following keys:\n - state: State of the issue.\n - created_at: Date when the issue was created.\n - closed_at: Date when the issue was closed. Only present if the issue is closed.\n - url: URL of the issue.\n - assignee: Login of the user assigned to the issue. Only present if the issue is assigned.\n - The embedding of the document is None.\n - The doc_hash of the document is None.\n\n Args:\n - state (IssueState): State of the issues to retrieve. 
Default is IssueState.OPEN.\n - labelFilters: an optional list of filters to apply to the issue list based on labels.\n\n :return: list of documents\n \"\"\"", "url": "https://github.com/run-llama/llama-hub/blob/b476d3bd2c963cad9dfe2944de7d6ce408aac65a/llama_hub/github_repo_issues/base.py#L118-L196", "sha": "b476d3bd2c963cad9dfe2944de7d6ce408aac65a", "code/function": "def load_data(\n self,\n state: Optional[IssueState] = IssueState.OPEN,\n labelFilters: Optional[List[Tuple[str, FilterType]]] = None,\n ) -> List[Document]:\n \n documents = []\n page = 1\n # Loop until there are no more issues\n while True:\n issues: Dict = self._loop.run_until_complete(\n self._github_client.get_issues(\n self._owner, self._repo, state=state.value, page=page\n )\n )\n\n if len(issues) == 0:\n print_if_verbose(self._verbose, \"No more issues found, stopping\")\n\n break\n print_if_verbose(\n self._verbose, f\"Found {len(issues)} issues in the repo page {page}\"\n )\n page += 1\n filterCount = 0\n for issue in issues:\n if not self._must_include(labelFilters, issue):\n filterCount += 1\n continue\n title = issue[\"title\"]\n body = issue[\"body\"]\n document = Document(\n doc_id=str(issue[\"number\"]),\n text=f\"{title}\\n{body}\",\n )\n extra_info = {\n \"state\": issue[\"state\"],\n \"created_at\": issue[\"created_at\"],\n # url is the API URL\n \"url\": issue[\"url\"],\n # source is the HTML URL, more conveninent for humans\n \"source\": issue[\"html_url\"],\n }\n if issue[\"closed_at\"] is not None:\n extra_info[\"closed_at\"] = issue[\"closed_at\"]\n if issue[\"assignee\"] is not None:\n extra_info[\"assignee\"] = issue[\"assignee\"][\"login\"]\n if issue[\"labels\"] is not None:\n extra_info[\"labels\"] = [label[\"name\"] for label in issue[\"labels\"]]\n document.extra_info = extra_info\n documents.append(document)\n\n print_if_verbose(self._verbose, f\"Resulted in {len(documents)} documents\")\n if labelFilters is not None:\n print_if_verbose(self._verbose, f\"Filtered out {filterCount} issues\")\n\n return documents"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "read_page", "code": "def read_page(self, page_id: str) -> str:\n \"\"\"Read a page.\"\"\"\n return self._read_block(page_id)", "docstring": "\"\"\"Read a page.\"\"\"", "url": "https://github.com/run-llama/llama-hub/blob/b476d3bd2c963cad9dfe2944de7d6ce408aac65a/llama_hub/notion/base.py#L89-L91", "sha": "b476d3bd2c963cad9dfe2944de7d6ce408aac65a", "code/function": "def read_page(self, page_id: str) -> str:\n \n return self._read_block(page_id)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "is_risk_title", "code": "def is_risk_title(title: str, filing_type: Optional[str]) -> bool:\n \"\"\"Checks to see if the title matches the pattern for the risk heading.\"\"\"\n if filing_type in REPORT_TYPES:\n return is_10k_risk_title(clean_sec_text(title, lowercase=True))\n elif filing_type in S1_TYPES:\n return is_s1_risk_title(clean_sec_text(title, lowercase=True))\n return False", "docstring": "\"\"\"Checks to see if the title matches the pattern for the risk heading.\"\"\"", "url": "https://github.com/run-llama/llama-hub/blob/b476d3bd2c963cad9dfe2944de7d6ce408aac65a/llama_hub/sec_filings/prepline_sec_filings/sec_document.py#L359-L365", "sha": "b476d3bd2c963cad9dfe2944de7d6ce408aac65a", "code/function": "def is_risk_title(title: str, filing_type: Optional[str]) -> bool:\n \n if filing_type in REPORT_TYPES:\n return is_10k_risk_title(clean_sec_text(title, lowercase=True))\n elif 
filing_type in S1_TYPES:\n return is_s1_risk_title(clean_sec_text(title, lowercase=True))\n return False"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_configure_optimizer", "code": "def _configure_optimizer(self, client_optimizer, model_parameters):\n \"\"\"Config basic optimizer and optimizer.\n\n Args:\n client_optimizer (torch.optim.Optimizer or callable): client optimizer.\n model_parameters (list): list of model parameters.\n \"\"\"\n if client_optimizer is not None:\n if isinstance(client_optimizer, tuple(self._supported_optims())):\n client_optimizer.param_groups[:] = [\n pg for pg in client_optimizer.param_groups if len(pg['params']) != 0\n ]\n log_dist(\"Removing param_group that has no 'params' in the client Optimizer\", ranks=[0])\n\n basic_optimizer = client_optimizer\n log_dist('Using client Optimizer as basic optimizer', ranks=[0])\n else:\n basic_optimizer = client_optimizer(model_parameters)\n log_dist('Using client callable to create basic optimizer', ranks=[0])\n else:\n basic_optimizer = self._configure_basic_optimizer(model_parameters)\n log_dist(f'Using DeepSpeed Optimizer param name {self.optimizer_name()} as basic optimizer', ranks=[0])\n\n if self.msamp_enabled():\n optlevel = self.msamp_optlevel()\n if optlevel == 'O3':\n # O3 is for ZeRO and need to cast to O2 for MS-AMP.\n optlevel = 'O2'\n use_te = self.msamp_use_te()\n model, basic_optimizer = msamp_initialize(self.module, basic_optimizer, optlevel, use_te)\n self._set_client_model(model)\n # We need to reset param names after msamp initialize.\n self.param_names = {param: name for name, param in model.named_parameters()}\n\n self._check_for_duplicates(basic_optimizer)\n\n self.basic_optimizer = basic_optimizer\n log_dist('DeepSpeed Basic Optimizer = {}'.format(basic_optimizer.__class__.__name__), ranks=[0])\n\n optimizer_wrapper = self._do_optimizer_sanity_check(basic_optimizer)\n if optimizer_wrapper == ZERO_OPTIMIZATION:\n self.optimizer = self._configure_zero_optimizer(basic_optimizer)\n elif optimizer_wrapper == FP8:\n self.optimizer = self._configure_fp8_optimizer(basic_optimizer, optimizer_wrapper)\n elif optimizer_wrapper == AMP:\n amp_params = self.amp_params()\n log_dist(f'Initializing AMP with these params: {amp_params}', ranks=[0])\n model, self.optimizer = amp.initialize(self.module, basic_optimizer, **amp_params)\n self._set_client_model(model)\n self._broadcast_model()\n # TODO: maybe need to broadcast experts differently?\n elif optimizer_wrapper == FP16:\n self.optimizer = self._configure_fp16_optimizer(basic_optimizer)\n elif optimizer_wrapper == BFLOAT16:\n self.optimizer = self._configure_bf16_optimizer(basic_optimizer)\n else:\n self.optimizer = basic_optimizer\n\n log_dist('DeepSpeed Final Optimizer = {}'.format(self.optimizer_name()), ranks=[0])\n\n self.compression_scheduler = self._configure_compression_scheduler()\n self.quantizer = self._configure_quantization()", "docstring": "\"\"\"Config basic optimizer and optimizer.\n\n Args:\n client_optimizer (torch.optim.Optimizer or callable): client optimizer.\n model_parameters (list): list of model parameters.\n \"\"\"", "url": "https://github.com/Azure/MS-AMP/blob/8de09e00c45ce1ce44dcd0ccf1d4e6e4f4ca04fa/msamp/deepspeed/runtime/engine.py#L63-L124", "sha": "8de09e00c45ce1ce44dcd0ccf1d4e6e4f4ca04fa", "code/function": "def _configure_optimizer(self, client_optimizer, model_parameters):\n \n if client_optimizer is not None:\n if isinstance(client_optimizer, tuple(self._supported_optims())):\n 
client_optimizer.param_groups[:] = [\n pg for pg in client_optimizer.param_groups if len(pg['params']) != 0\n ]\n log_dist(\"Removing param_group that has no 'params' in the client Optimizer\", ranks=[0])\n\n basic_optimizer = client_optimizer\n log_dist('Using client Optimizer as basic optimizer', ranks=[0])\n else:\n basic_optimizer = client_optimizer(model_parameters)\n log_dist('Using client callable to create basic optimizer', ranks=[0])\n else:\n basic_optimizer = self._configure_basic_optimizer(model_parameters)\n log_dist(f'Using DeepSpeed Optimizer param name {self.optimizer_name()} as basic optimizer', ranks=[0])\n\n if self.msamp_enabled():\n optlevel = self.msamp_optlevel()\n if optlevel == 'O3':\n # O3 is for ZeRO and need to cast to O2 for MS-AMP.\n optlevel = 'O2'\n use_te = self.msamp_use_te()\n model, basic_optimizer = msamp_initialize(self.module, basic_optimizer, optlevel, use_te)\n self._set_client_model(model)\n # We need to reset param names after msamp initialize.\n self.param_names = {param: name for name, param in model.named_parameters()}\n\n self._check_for_duplicates(basic_optimizer)\n\n self.basic_optimizer = basic_optimizer\n log_dist('DeepSpeed Basic Optimizer = {}'.format(basic_optimizer.__class__.__name__), ranks=[0])\n\n optimizer_wrapper = self._do_optimizer_sanity_check(basic_optimizer)\n if optimizer_wrapper == ZERO_OPTIMIZATION:\n self.optimizer = self._configure_zero_optimizer(basic_optimizer)\n elif optimizer_wrapper == FP8:\n self.optimizer = self._configure_fp8_optimizer(basic_optimizer, optimizer_wrapper)\n elif optimizer_wrapper == AMP:\n amp_params = self.amp_params()\n log_dist(f'Initializing AMP with these params: {amp_params}', ranks=[0])\n model, self.optimizer = amp.initialize(self.module, basic_optimizer, **amp_params)\n self._set_client_model(model)\n self._broadcast_model()\n # TODO: maybe need to broadcast experts differently?\n elif optimizer_wrapper == FP16:\n self.optimizer = self._configure_fp16_optimizer(basic_optimizer)\n elif optimizer_wrapper == BFLOAT16:\n self.optimizer = self._configure_bf16_optimizer(basic_optimizer)\n else:\n self.optimizer = basic_optimizer\n\n log_dist('DeepSpeed Final Optimizer = {}'.format(self.optimizer_name()), ranks=[0])\n\n self.compression_scheduler = self._configure_compression_scheduler()\n self.quantizer = self._configure_quantization()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__getattr__", "code": "def __getattr__(self, name):\n \"\"\"Get attribute by name.\n\n Args:\n name (str): Attribute name.\n\n Returns:\n Attribute value.\n \"\"\"\n return self.__dict__.get(name, getattr(self.__dict__['ctx'], name))", "docstring": "\"\"\"Get attribute by name.\n\n Args:\n name (str): Attribute name.\n\n Returns:\n Attribute value.\n \"\"\"", "url": "https://github.com/Azure/MS-AMP/blob/8de09e00c45ce1ce44dcd0ccf1d4e6e4f4ca04fa/msamp/te/modules.py#L156-L165", "sha": "8de09e00c45ce1ce44dcd0ccf1d4e6e4f4ca04fa", "code/function": "def __getattr__(self, name):\n \n return self.__dict__.get(name, getattr(self.__dict__['ctx'], name))"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "load_safetensors", "code": "def load_safetensors(self, state_dict_dir):\n \"\"\"\n Lazily load the safetensors by associating each weight with the filename.\n \"\"\"\n filename = os.path.join(state_dict_dir, _SAFETENSORS_MODEL_FILENAME)\n with safe_open(filename, framework=\"pt\") as f:\n keys = f.keys()\n key_to_filename = dict(zip(keys, 
[_SAFETENSORS_MODEL_FILENAME] * len(keys)))\n self._load_from_state_dict_dir(state_dict_dir, key_to_filename)", "docstring": "\"\"\"\n Lazily load the safetensors by associating each weight with the filename.\n \"\"\"", "url": "https://github.com/huggingface/optimum-neuron/blob/558379f8a1f7f67820ea219323f9af434a6ae2ba/optimum/neuron/backends/hlo/module.py#L174-L182", "sha": "558379f8a1f7f67820ea219323f9af434a6ae2ba", "code/function": "def load_safetensors(self, state_dict_dir):\n \n filename = os.path.join(state_dict_dir, _SAFETENSORS_MODEL_FILENAME)\n with safe_open(filename, framework=\"pt\") as f:\n keys = f.keys()\n key_to_filename = dict(zip(keys, [_SAFETENSORS_MODEL_FILENAME] * len(keys)))\n self._load_from_state_dict_dir(state_dict_dir, key_to_filename)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "export_csv", "code": "def export_csv(self, path):\n \"\"\"\n Export the capture to a CSV file.\n\n Args:\n path (str): Path to the destination file.\n\n Returns:\n None\n \"\"\"\n\n names = [p.name for p in self._probes]\n values = [self.get_trace(n) for n in names]\n\n # Transpose list of lists so that data flows top-to-bottom instead of\n # left-to-right\n values_transpose = [list(x) for x in zip(*values)]\n\n import csv\n\n with open(path, \"w\") as f:\n writer = csv.writer(f)\n\n writer.writerow(names)\n writer.writerows(values_transpose)", "docstring": "\"\"\"\n Export the capture to a CSV file.\n\n Args:\n path (str): Path to the destination file.\n\n Returns:\n None\n \"\"\"", "url": "https://github.com/fischermoseley/manta/blob/e11d9a8315a7a1c91ab5087955d575fc149da06c/src/manta/logic_analyzer/capture.py#L69-L93", "sha": "e11d9a8315a7a1c91ab5087955d575fc149da06c", "code/function": "def export_csv(self, path):\n \n\n names = [p.name for p in self._probes]\n values = [self.get_trace(n) for n in names]\n\n # Transpose list of lists so that data flows top-to-bottom instead of\n # left-to-right\n values_transpose = [list(x) for x in zip(*values)]\n\n import csv\n\n with open(path, \"w\") as f:\n writer = csv.writer(f)\n\n writer.writerow(names)\n writer.writerows(values_transpose)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "test_k_equals_zero", "code": "@pytest.mark.parametrize('k', [0])\n@pytest.mark.parametrize('batch_size', [1, 2, 32])\n@pytest.mark.parametrize(\"acceptance_sampler_method\",\n [\"rejection_sampler\", \"typical_acceptance_sampler\"])\n@torch.inference_mode()\ndef test_k_equals_zero(k: int, batch_size: int,\n acceptance_sampler_method: str):\n \"\"\"Verify that the SpecDecodeWorker calls the draft and target workers\n when k is zero. 
This happens during prefill.\n \"\"\"\n draft_worker = mock_worker(cls=MultiStepWorker)\n target_worker = mock_worker()\n metrics_collector = MagicMock(spec=AsyncMetricsCollector)\n\n sampler_output = MagicMock(spec=SamplerOutput)\n sampler_output.hidden_states = None\n target_worker.execute_model.return_value = [sampler_output]\n\n draft_worker.device = 'cuda'\n target_worker.device = 'cuda'\n\n set_random_seed(1)\n\n worker = SpecDecodeWorker(\n proposer_worker=draft_worker,\n scorer_worker=target_worker,\n spec_decode_sampler=mock_spec_decode_sampler(\n acceptance_sampler_method),\n disable_logprobs=False,\n metrics_collector=metrics_collector,\n )\n\n seq_group_metadata_list, _, _ = create_batch(batch_size,\n k,\n prev_output_token_len=0)\n execute_model_req = ExecuteModelRequest(\n seq_group_metadata_list=seq_group_metadata_list, num_lookahead_slots=k)\n\n out = worker.execute_model(execute_model_req=execute_model_req)\n\n assert len(out) == 1, f\"expected only one token output when {k=}\"\n assert out[0].sampled_token_probs is None, (\n \"expect gpu tensor references to be None\")\n assert out[\n 0].sampled_token_ids is None, \"expect gpu tensor references to be None\"\n\n draft_worker.execute_model.assert_called_once_with(execute_model_req)\n target_worker.execute_model.assert_called_once_with(execute_model_req)", "docstring": "\"\"\"Verify that the SpecDecodeWorker calls the draft and target workers\n when k is zero. This happens during prefill.\n \"\"\"", "url": "https://github.com/vllm-project/vllm/blob/c9e2d644e728e8e93ef2871276ed7a6b39c1d0eb/tests/spec_decode/test_spec_decode_worker.py#L474-L521", "sha": "c9e2d644e728e8e93ef2871276ed7a6b39c1d0eb", "code/function": "@pytest.mark.parametrize('k', [0])\n@pytest.mark.parametrize('batch_size', [1, 2, 32])\n@pytest.mark.parametrize(\"acceptance_sampler_method\",\n [\"rejection_sampler\", \"typical_acceptance_sampler\"])\n@torch.inference_mode()\ndef test_k_equals_zero(k: int, batch_size: int,\n acceptance_sampler_method: str):\n \n draft_worker = mock_worker(cls=MultiStepWorker)\n target_worker = mock_worker()\n metrics_collector = MagicMock(spec=AsyncMetricsCollector)\n\n sampler_output = MagicMock(spec=SamplerOutput)\n sampler_output.hidden_states = None\n target_worker.execute_model.return_value = [sampler_output]\n\n draft_worker.device = 'cuda'\n target_worker.device = 'cuda'\n\n set_random_seed(1)\n\n worker = SpecDecodeWorker(\n proposer_worker=draft_worker,\n scorer_worker=target_worker,\n spec_decode_sampler=mock_spec_decode_sampler(\n acceptance_sampler_method),\n disable_logprobs=False,\n metrics_collector=metrics_collector,\n )\n\n seq_group_metadata_list, _, _ = create_batch(batch_size,\n k,\n prev_output_token_len=0)\n execute_model_req = ExecuteModelRequest(\n seq_group_metadata_list=seq_group_metadata_list, num_lookahead_slots=k)\n\n out = worker.execute_model(execute_model_req=execute_model_req)\n\n assert len(out) == 1, f\"expected only one token output when {k=}\"\n assert out[0].sampled_token_probs is None, (\n \"expect gpu tensor references to be None\")\n assert out[\n 0].sampled_token_ids is None, \"expect gpu tensor references to be None\"\n\n draft_worker.execute_model.assert_called_once_with(execute_model_req)\n target_worker.execute_model.assert_called_once_with(execute_model_req)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "mod2pi", "code": "def mod2pi(self, theta: float) -> float:\n\t\t\"\"\"\n\t\tPerform modulus operation on 2π.\n\t\t\"\"\"\n\t\treturn theta - 2.0 
* math.pi * math.floor(theta / math.pi / 2.0)", "docstring": "\"\"\"\n\t\tPerform modulus operation on 2π.\n\t\t\"\"\"", "url": "https://github.com/ai-winter/python_motion_planning/blob/dc5f45c42488383a488ace21495e70e8129a2b55/python_motion_planning/curve_generation/curve.py#L51-L55", "sha": "dc5f45c42488383a488ace21495e70e8129a2b55", "code/function": "def mod2pi(self, theta: float) -> float:\n\t\t\n\t\treturn theta - 2.0 * math.pi * math.floor(theta / math.pi / 2.0)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "clear", "code": "@env.group(name=\"clear\", short_help=\"Clear an environment directory.\", context_settings=context_settings)\ndef clear() -> None:\n \"\"\"Clear an environment directory.\"\"\"", "docstring": "\"\"\"Clear an environment directory.\"\"\"", "url": "https://github.com/devine-dl/devine/blob/09eda168824157851e30003b196f4851298ec3ac/devine/commands/env.py#L66-L68", "sha": "09eda168824157851e30003b196f4851298ec3ac", "code/function": "@env.group(name=\"clear\", short_help=\"Clear an environment directory.\", context_settings=context_settings)\ndef clear() -> None:"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "checkForNCNN", "code": "def checkForNCNN() -> bool:\n \"\"\"\n function that checks if the pytorch backend is available\n \"\"\"\n try:\n from rife_ncnn_vulkan_python import Rife\n import ncnn\n\n try:\n from upscale_ncnn_py import UPSCALE\n except Exception:\n printAndLog(\n \"Warning: Cannot import upscale_ncnn, falling back to default ncnn processing. (Please install vcredlist on your computer to fix this!)\"\n )\n return True\n except ImportError as e:\n log(str(e))\n return False\n except Exception as e:\n log(str(e))", "docstring": "\"\"\"\n function that checks if the pytorch backend is available\n \"\"\"", "url": "https://github.com/TNTwise/REAL-Video-Enhancer/blob/46bda804944d050db1413ea2493b15a295e13441/backend/src/utils/Util.py#L270-L289", "sha": "46bda804944d050db1413ea2493b15a295e13441", "code/function": "def checkForNCNN() -> bool:\n \n try:\n from rife_ncnn_vulkan_python import Rife\n import ncnn\n\n try:\n from upscale_ncnn_py import UPSCALE\n except Exception:\n printAndLog(\n \"Warning: Cannot import upscale_ncnn, falling back to default ncnn processing. 
(Please install vcredlist on your computer to fix this!)\"\n )\n return True\n except ImportError as e:\n log(str(e))\n return False\n except Exception as e:\n log(str(e))"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_to_a3m", "code": "def _to_a3m(sequences: Sequence[str]) -> str:\n \"\"\"Converts sequences to an a3m file.\"\"\"\n names = [\"sequence %d\" % i for i in range(1, len(sequences) + 1)]\n a3m = []\n for sequence, name in zip(sequences, names):\n a3m.append(u\">\" + name + u\"\\n\")\n a3m.append(sequence + u\"\\n\")\n return \"\".join(a3m)", "docstring": "\"\"\"Converts sequences to an a3m file.\"\"\"", "url": "https://github.com/jasonkyuyim/se3_diffusion/blob/53359d71cfabc819ffaa571abd2cef736c871a5d/openfold/data/tools/kalign.py#L26-L33", "sha": "53359d71cfabc819ffaa571abd2cef736c871a5d", "code/function": "def _to_a3m(sequences: Sequence[str]) -> str:\n \n names = [\"sequence %d\" % i for i in range(1, len(sequences) + 1)]\n a3m = []\n for sequence, name in zip(sequences, names):\n a3m.append(u\">\" + name + u\"\\n\")\n a3m.append(sequence + u\"\\n\")\n return \"\".join(a3m)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "async_turn_on", "code": "async def async_turn_on(self, **kwargs) -> None:\n \"\"\"Turn light on.\"\"\"\n\n if not self.coordinator.client.use_ble_relay:\n raise HomeAssistantError(f'A PetKit BLE relay is required to control {self.wf_data.data[\"name\"]}')\n if not self.wf_data.group_relay:\n raise HomeAssistantError(\n f'A PetKit BLE relay is required to control {self.wf_data.data[\"name\"]}. '\n f'PetKit did not return a valid relay device. If you do have a relay device, '\n f'it may temporarily be offline.'\n )\n\n try:\n await self.coordinator.client.control_water_fountain(self.wf_data, W5Command.LIGHT_ON)\n except BluetoothError:\n raise PetKitBluetoothError(f'Bluetooth connection to {self.wf_data.data[\"name\"]} failed. Please try turning on the light again.')\n else:\n self.wf_data.data['settings']['lampRingSwitch'] = 1\n self.async_write_ha_state()\n await asyncio.sleep(1)\n await self.coordinator.async_request_refresh()", "docstring": "\"\"\"Turn light on.\"\"\"", "url": "https://github.com/RobertD502/home-assistant-petkit/blob/a7e5d617678bca56466e3571f72f60dce4aa98bd/custom_components/petkit/switch.py#L171-L191", "sha": "a7e5d617678bca56466e3571f72f60dce4aa98bd", "code/function": "async def async_turn_on(self, **kwargs) -> None:\n \n\n if not self.coordinator.client.use_ble_relay:\n raise HomeAssistantError(f'A PetKit BLE relay is required to control {self.wf_data.data[\"name\"]}')\n if not self.wf_data.group_relay:\n raise HomeAssistantError(\n f'A PetKit BLE relay is required to control {self.wf_data.data[\"name\"]}. '\n f'PetKit did not return a valid relay device. If you do have a relay device, '\n f'it may temporarily be offline.'\n )\n\n try:\n await self.coordinator.client.control_water_fountain(self.wf_data, W5Command.LIGHT_ON)\n except BluetoothError:\n raise PetKitBluetoothError(f'Bluetooth connection to {self.wf_data.data[\"name\"]} failed. 
Please try turning on the light again.')\n else:\n self.wf_data.data['settings']['lampRingSwitch'] = 1\n self.async_write_ha_state()\n await asyncio.sleep(1)\n await self.coordinator.async_request_refresh()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ManualFeed.native_value", "code": "@property\n def native_value(self) -> str:\n \"\"\"Always reset to 0,0\"\"\"\n\n return \"0,0\"", "docstring": "\"\"\"Always reset to 0,0\"\"\"", "url": "https://github.com/RobertD502/home-assistant-petkit/blob/a7e5d617678bca56466e3571f72f60dce4aa98bd/custom_components/petkit/text.py#L114-L118", "sha": "a7e5d617678bca56466e3571f72f60dce4aa98bd", "code/function": "@property\n def native_value(self) -> str:\n \n\n return \"0,0\""} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_init_branch", "code": "def _init_branch(self,):\n \"\"\"Initialize classification branch and regression branch of head.\"\"\"\n\n fc_cls = Linear(self.embed_dims*self.bbox_size, self.cls_out_channels)\n # fc_cls = Linear(self.embed_dims, self.cls_out_channels)\n\n reg_branch = []\n for _ in range(self.num_reg_fcs):\n reg_branch.append(Linear(self.embed_dims, self.embed_dims))\n reg_branch.append(nn.LayerNorm(self.embed_dims))\n reg_branch.append(nn.ReLU())\n\n if self.discrete_output:\n reg_branch.append(nn.Linear(\n self.embed_dims, max(self.canvas_size), bias=True,))\n else:\n reg_branch.append(nn.Linear(\n self.embed_dims, self.coord_dim, bias=True,))\n\n reg_branch = nn.Sequential(*reg_branch)\n # add sigmoid or not\n\n def _get_clones(module, N):\n return nn.ModuleList([copy.deepcopy(module) for i in range(N)])\n\n num_pred = self.transformer.decoder.num_layers\n\n if self.iterative:\n fc_cls = _get_clones(fc_cls, num_pred)\n reg_branch = _get_clones(reg_branch, num_pred)\n else:\n reg_branch = nn.ModuleList(\n [reg_branch for _ in range(num_pred)])\n fc_cls = nn.ModuleList(\n [fc_cls for _ in range(num_pred)])\n\n self.pre_branches = nn.ModuleDict([\n ('cls', fc_cls),\n ('reg', reg_branch), ])", "docstring": "\"\"\"Initialize classification branch and regression branch of head.\"\"\"", "url": "https://github.com/Tsinghua-MARS-Lab/Online-HD-Map-Construction-CVPR2023/blob/775b203aeab56be4248fb5495be452fa404bd29d/src/models/heads/map_element_detector.py#L124-L162", "sha": "775b203aeab56be4248fb5495be452fa404bd29d", "code/function": "def _init_branch(self,):\n \n\n fc_cls = Linear(self.embed_dims*self.bbox_size, self.cls_out_channels)\n # fc_cls = Linear(self.embed_dims, self.cls_out_channels)\n\n reg_branch = []\n for _ in range(self.num_reg_fcs):\n reg_branch.append(Linear(self.embed_dims, self.embed_dims))\n reg_branch.append(nn.LayerNorm(self.embed_dims))\n reg_branch.append(nn.ReLU())\n\n if self.discrete_output:\n reg_branch.append(nn.Linear(\n self.embed_dims, max(self.canvas_size), bias=True,))\n else:\n reg_branch.append(nn.Linear(\n self.embed_dims, self.coord_dim, bias=True,))\n\n reg_branch = nn.Sequential(*reg_branch)\n # add sigmoid or not\n\n def _get_clones(module, N):\n return nn.ModuleList([copy.deepcopy(module) for i in range(N)])\n\n num_pred = self.transformer.decoder.num_layers\n\n if self.iterative:\n fc_cls = _get_clones(fc_cls, num_pred)\n reg_branch = _get_clones(reg_branch, num_pred)\n else:\n reg_branch = nn.ModuleList(\n [reg_branch for _ in range(num_pred)])\n fc_cls = nn.ModuleList(\n [fc_cls for _ in range(num_pred)])\n\n self.pre_branches = nn.ModuleDict([\n ('cls', fc_cls),\n ('reg', reg_branch), ])"} +{"repo_name": "", 
"dataset": "github_2023", "owner": "", "lang": "", "func_name": "_remove_invalid_boxes", "code": "def _remove_invalid_boxes(\n self,\n detected_boxes,\n detected_scores,\n detected_class_labels,\n detected_masks=None,\n ):\n \"\"\"Removes entries with invalid boxes.\n\n A box is invalid if either its xmax is smaller than its xmin, or its ymax\n is smaller than its ymin.\n\n Args:\n detected_boxes: A float numpy array of size [num_boxes, 4] containing box\n coordinates in [ymin, xmin, ymax, xmax] format.\n detected_scores: A float numpy array of size [num_boxes].\n detected_class_labels: A int32 numpy array of size [num_boxes].\n detected_masks: A uint8 numpy array of size [num_boxes, height, width].\n\n Returns:\n valid_detected_boxes: A float numpy array of size [num_valid_boxes, 4]\n containing box coordinates in [ymin, xmin, ymax, xmax] format.\n valid_detected_scores: A float numpy array of size [num_valid_boxes].\n valid_detected_class_labels: A int32 numpy array of size\n [num_valid_boxes].\n valid_detected_masks: A uint8 numpy array of size\n [num_valid_boxes, height, width].\n \"\"\"\n valid_indices = np.logical_and(\n detected_boxes[:, 0] < detected_boxes[:, 2],\n detected_boxes[:, 1] < detected_boxes[:, 3],\n )\n detected_boxes = detected_boxes[valid_indices]\n detected_scores = detected_scores[valid_indices]\n detected_class_labels = detected_class_labels[valid_indices]\n if detected_masks is not None:\n detected_masks = detected_masks[valid_indices]\n return [\n detected_boxes,\n detected_scores,\n detected_class_labels,\n detected_masks,\n ]", "docstring": "\"\"\"Removes entries with invalid boxes.\n\n A box is invalid if either its xmax is smaller than its xmin, or its ymax\n is smaller than its ymin.\n\n Args:\n detected_boxes: A float numpy array of size [num_boxes, 4] containing box\n coordinates in [ymin, xmin, ymax, xmax] format.\n detected_scores: A float numpy array of size [num_boxes].\n detected_class_labels: A int32 numpy array of size [num_boxes].\n detected_masks: A uint8 numpy array of size [num_boxes, height, width].\n\n Returns:\n valid_detected_boxes: A float numpy array of size [num_valid_boxes, 4]\n containing box coordinates in [ymin, xmin, ymax, xmax] format.\n valid_detected_scores: A float numpy array of size [num_valid_boxes].\n valid_detected_class_labels: A int32 numpy array of size\n [num_valid_boxes].\n valid_detected_masks: A uint8 numpy array of size\n [num_valid_boxes, height, width].\n \"\"\"", "url": "https://github.com/yjh0410/YOWOv2/blob/9e8d23c11ad26ef5e6cfc01e3f4f1112dee634bf/evaluator/ava_evaluation/per_image_evaluation.py#L411-L453", "sha": "9e8d23c11ad26ef5e6cfc01e3f4f1112dee634bf", "code/function": "def _remove_invalid_boxes(\n self,\n detected_boxes,\n detected_scores,\n detected_class_labels,\n detected_masks=None,\n ):\n \n valid_indices = np.logical_and(\n detected_boxes[:, 0] < detected_boxes[:, 2],\n detected_boxes[:, 1] < detected_boxes[:, 3],\n )\n detected_boxes = detected_boxes[valid_indices]\n detected_scores = detected_scores[valid_indices]\n detected_class_labels = detected_class_labels[valid_indices]\n if detected_masks is not None:\n detected_masks = detected_masks[valid_indices]\n return [\n detected_boxes,\n detected_scores,\n detected_class_labels,\n detected_masks,\n ]"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "finalize", "code": "def finalize(self):\n \"\"\"Performs a final set of operations after logs have been saved.\"\"\"\n\n return", "docstring": "\"\"\"Performs a final 
set of operations after logs have been saved.\"\"\"", "url": "https://github.com/hashicorp-forge/grove/blob/a2b4bea0e15ef1a2a80bf092f4a7cb835901ecbd/grove/processors/__init__.py#L54-L57", "sha": "a2b4bea0e15ef1a2a80bf092f4a7cb835901ecbd", "code/function": "def finalize(self):\n \n\n return"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "center_of_mass", "code": "def center_of_mass(mask, esp=1e-6):\n \"\"\"Calculate the centroid coordinates of the mask.\n\n Args:\n mask (Tensor): The mask to be calculated, shape (h, w).\n esp (float): Avoid dividing by zero. Default: 1e-6.\n\n Returns:\n tuple[Tensor]: the coordinates of the center point of the mask.\n\n - center_h (Tensor): the center point of the height.\n - center_w (Tensor): the center point of the width.\n \"\"\"\n h, w = mask.shape\n grid_h = torch.arange(h, device=mask.device)[:, None]\n grid_w = torch.arange(w, device=mask.device)\n normalizer = mask.sum().float().clamp(min=esp)\n center_h = (mask * grid_h).sum() / normalizer\n center_w = (mask * grid_w).sum() / normalizer\n return center_h, center_w", "docstring": "\"\"\"Calculate the centroid coordinates of the mask.\n\n Args:\n mask (Tensor): The mask to be calculated, shape (h, w).\n esp (float): Avoid dividing by zero. Default: 1e-6.\n\n Returns:\n tuple[Tensor]: the coordinates of the center point of the mask.\n\n - center_h (Tensor): the center point of the height.\n - center_w (Tensor): the center point of the width.\n \"\"\"", "url": "https://github.com/zhanggang001/CEDNet/blob/305d6371db427bcf3c93b5f85a2609d05e9eb4f1/ced-mmdet/mmdet/core/utils/misc.py#L168-L187", "sha": "305d6371db427bcf3c93b5f85a2609d05e9eb4f1", "code/function": "def center_of_mass(mask, esp=1e-6):\n \n h, w = mask.shape\n grid_h = torch.arange(h, device=mask.device)[:, None]\n grid_w = torch.arange(w, device=mask.device)\n normalizer = mask.sum().float().clamp(min=esp)\n center_h = (mask * grid_h).sum() / normalizer\n center_w = (mask * grid_w).sum() / normalizer\n return center_h, center_w"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_init_predictor", "code": "def _init_predictor(self):\n \"\"\"Initialize predictor layers of the head.\"\"\"\n self.conv_cls = nn.Conv2d(\n self.feat_channels, self.cls_out_channels, 3, padding=1)\n self.conv_reg = nn.Conv2d(self.feat_channels, 4, 3, padding=1)", "docstring": "\"\"\"Initialize predictor layers of the head.\"\"\"", "url": "https://github.com/zhanggang001/CEDNet/blob/305d6371db427bcf3c93b5f85a2609d05e9eb4f1/ced-mmdet/mmdet/models/dense_heads/anchor_free_head.py#L153-L157", "sha": "305d6371db427bcf3c93b5f85a2609d05e9eb4f1", "code/function": "def _init_predictor(self):\n \n self.conv_cls = nn.Conv2d(\n self.feat_channels, self.cls_out_channels, 3, padding=1)\n self.conv_reg = nn.Conv2d(self.feat_channels, 4, 3, padding=1)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "cty_lookup", "code": "def cty_lookup(self, callsign: str) -> list:\n \"\"\"Lookup callsign in cty.dat file.\n\n Parameters\n ----------\n callsign : str\n callsign to lookup\n\n Returns\n -------\n return : list\n list of dicts containing the callsign and the station.\n \"\"\"\n callsign = callsign.upper()\n for count in reversed(range(len(callsign))):\n searchitem = callsign[: count + 1]\n result = {\n key: val for key, val in self.ctyfile.items() if key == searchitem\n }\n if not result:\n continue\n if result.get(searchitem).get(\"exact_match\"):\n if searchitem == callsign:\n 
return result\n continue\n return result", "docstring": "\"\"\"Lookup callsign in cty.dat file.\n\n Parameters\n ----------\n callsign : str\n callsign to lookup\n\n Returns\n -------\n return : list\n list of dicts containing the callsign and the station.\n \"\"\"", "url": "https://github.com/mbridak/not1mm/blob/c7dbdaf4e509a865a1bf64bc037e0efac6b4e200/not1mm/__main__.py#L1972-L1997", "sha": "c7dbdaf4e509a865a1bf64bc037e0efac6b4e200", "code/function": "def cty_lookup(self, callsign: str) -> list:\n \n callsign = callsign.upper()\n for count in reversed(range(len(callsign))):\n searchitem = callsign[: count + 1]\n result = {\n key: val for key, val in self.ctyfile.items() if key == searchitem\n }\n if not result:\n continue\n if result.get(searchitem).get(\"exact_match\"):\n if searchitem == callsign:\n return result\n continue\n return result"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "change_mode", "code": "def change_mode(self, mode: str, intended_freq=None) -> None:\n \"\"\"\n Change mode to given mode.\n Send the new mode to the rig control.\n Set the band indicator.\n Set the window title.\n Clear the inputs.\n Read the CW macros.\n\n Parameters\n ----------\n mode : str\n Mode to change to.\n\n Returns\n -------\n None\n \"\"\"\n if mode in (\"CW\", \"CW-U\", \"CW-L\", \"CWR\"):\n if self.rig_control and self.rig_control.online:\n self.rig_control.set_mode(self.rig_control.last_cw_mode)\n if self.pref.get(\"cwtype\") == 3 and self.rig_control is not None:\n if self.rig_control.interface == \"flrig\":\n self.cwspeed_spinbox_changed()\n self.rig_control.cat.set_flrig_cw_send(True)\n else:\n self.setmode(\"CW\")\n self.radio_state[\"mode\"] = \"CW\"\n band = getband(str(self.radio_state.get(\"vfoa\", \"0.0\")))\n self.set_band_indicator(band)\n self.set_window_title()\n self.clearinputs()\n self.read_macros()\n return\n if mode in (\n \"DIGI-U\",\n \"DIGI-L\",\n \"RTTY\",\n \"RTTY-R\",\n \"LSB-D\",\n \"USB-D\",\n \"AM-D\",\n \"FM-D\",\n ):\n if self.rig_control and self.rig_control.online:\n self.rig_control.set_mode(self.rig_control.last_data_mode)\n else:\n self.radio_state[\"mode\"] = \"RTTY\"\n self.setmode(\"RTTY\")\n band = getband(str(self.radio_state.get(\"vfoa\", \"0.0\")))\n self.set_band_indicator(band)\n self.set_window_title()\n self.clearinputs()\n self.read_macros()\n return\n if mode == \"SSB\":\n if intended_freq:\n freq = intended_freq\n else:\n freq = int(self.radio_state.get(\"vfoa\", 0))\n\n if freq > 10000000:\n self.radio_state[\"mode\"] = \"USB\"\n else:\n self.radio_state[\"mode\"] = \"LSB\"\n\n if self.rig_control and self.rig_control.online:\n self.rig_control.set_mode(self.radio_state.get(\"mode\"))\n else:\n self.setmode(\"SSB\")\n band = getband(str(self.radio_state.get(\"vfoa\", \"0.0\")))\n self.set_band_indicator(band)\n self.set_window_title()\n self.clearinputs()\n self.read_macros()", "docstring": "\"\"\"\n Change mode to given mode.\n Send the new mode to the rig control.\n Set the band indicator.\n Set the window title.\n Clear the inputs.\n Read the CW macros.\n\n Parameters\n ----------\n mode : str\n Mode to change to.\n\n Returns\n -------\n None\n \"\"\"", "url": "https://github.com/mbridak/not1mm/blob/c7dbdaf4e509a865a1bf64bc037e0efac6b4e200/not1mm/__main__.py#L3433-L3507", "sha": "c7dbdaf4e509a865a1bf64bc037e0efac6b4e200", "code/function": "def change_mode(self, mode: str, intended_freq=None) -> None:\n \n if mode in (\"CW\", \"CW-U\", \"CW-L\", \"CWR\"):\n if self.rig_control and 
self.rig_control.online:\n self.rig_control.set_mode(self.rig_control.last_cw_mode)\n if self.pref.get(\"cwtype\") == 3 and self.rig_control is not None:\n if self.rig_control.interface == \"flrig\":\n self.cwspeed_spinbox_changed()\n self.rig_control.cat.set_flrig_cw_send(True)\n else:\n self.setmode(\"CW\")\n self.radio_state[\"mode\"] = \"CW\"\n band = getband(str(self.radio_state.get(\"vfoa\", \"0.0\")))\n self.set_band_indicator(band)\n self.set_window_title()\n self.clearinputs()\n self.read_macros()\n return\n if mode in (\n \"DIGI-U\",\n \"DIGI-L\",\n \"RTTY\",\n \"RTTY-R\",\n \"LSB-D\",\n \"USB-D\",\n \"AM-D\",\n \"FM-D\",\n ):\n if self.rig_control and self.rig_control.online:\n self.rig_control.set_mode(self.rig_control.last_data_mode)\n else:\n self.radio_state[\"mode\"] = \"RTTY\"\n self.setmode(\"RTTY\")\n band = getband(str(self.radio_state.get(\"vfoa\", \"0.0\")))\n self.set_band_indicator(band)\n self.set_window_title()\n self.clearinputs()\n self.read_macros()\n return\n if mode == \"SSB\":\n if intended_freq:\n freq = intended_freq\n else:\n freq = int(self.radio_state.get(\"vfoa\", 0))\n\n if freq > 10000000:\n self.radio_state[\"mode\"] = \"USB\"\n else:\n self.radio_state[\"mode\"] = \"LSB\"\n\n if self.rig_control and self.rig_control.online:\n self.rig_control.set_mode(self.radio_state.get(\"mode\"))\n else:\n self.setmode(\"SSB\")\n band = getband(str(self.radio_state.get(\"vfoa\", \"0.0\")))\n self.set_band_indicator(band)\n self.set_window_title()\n self.clearinputs()\n self.read_macros()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "preprocess", "code": "def preprocess(\n img: np.ndarray, out_bbox, input_size: Tuple[int, int] = (192, 256)\n) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:\n \"\"\"Do preprocessing for DWPose model inference.\n\n Args:\n img (np.ndarray): Input image in shape.\n input_size (tuple): Input image size in shape (w, h).\n\n Returns:\n tuple:\n - resized_img (np.ndarray): Preprocessed image.\n - center (np.ndarray): Center of image.\n - scale (np.ndarray): Scale of image.\n \"\"\"\n # get shape of image\n img_shape = img.shape[:2]\n out_img, out_center, out_scale = [], [], []\n if len(out_bbox) == 0:\n out_bbox = [[0, 0, img_shape[1], img_shape[0]]]\n for i in range(len(out_bbox)):\n x0 = out_bbox[i][0]\n y0 = out_bbox[i][1]\n x1 = out_bbox[i][2]\n y1 = out_bbox[i][3]\n bbox = np.array([x0, y0, x1, y1])\n\n # get center and scale\n center, scale = bbox_xyxy2cs(bbox, padding=1.25)\n\n # do affine transformation\n resized_img, scale = top_down_affine(input_size, scale, center, img)\n\n # normalize image\n mean = np.array([123.675, 116.28, 103.53])\n std = np.array([58.395, 57.12, 57.375])\n resized_img = (resized_img - mean) / std\n\n out_img.append(resized_img)\n out_center.append(center)\n out_scale.append(scale)\n\n return out_img, out_center, out_scale", "docstring": "\"\"\"Do preprocessing for DWPose model inference.\n\n Args:\n img (np.ndarray): Input image in shape.\n input_size (tuple): Input image size in shape (w, h).\n\n Returns:\n tuple:\n - resized_img (np.ndarray): Preprocessed image.\n - center (np.ndarray): Center of image.\n - scale (np.ndarray): Scale of image.\n \"\"\"", "url": "https://github.com/Mikubill/sd-webui-controlnet/blob/56cec5b2958edf3b1807b7e7b2b1b5186dbd2f81/annotator/openpose/cv_ox_pose.py#L6-L48", "sha": "56cec5b2958edf3b1807b7e7b2b1b5186dbd2f81", "code/function": "def preprocess(\n img: np.ndarray, out_bbox, input_size: Tuple[int, int] = (192, 256)\n) -> 
Tuple[np.ndarray, np.ndarray, np.ndarray]:\n \n # get shape of image\n img_shape = img.shape[:2]\n out_img, out_center, out_scale = [], [], []\n if len(out_bbox) == 0:\n out_bbox = [[0, 0, img_shape[1], img_shape[0]]]\n for i in range(len(out_bbox)):\n x0 = out_bbox[i][0]\n y0 = out_bbox[i][1]\n x1 = out_bbox[i][2]\n y1 = out_bbox[i][3]\n bbox = np.array([x0, y0, x1, y1])\n\n # get center and scale\n center, scale = bbox_xyxy2cs(bbox, padding=1.25)\n\n # do affine transformation\n resized_img, scale = top_down_affine(input_size, scale, center, img)\n\n # normalize image\n mean = np.array([123.675, 116.28, 103.53])\n std = np.array([58.395, 57.12, 57.375])\n resized_img = (resized_img - mean) / std\n\n out_img.append(resized_img)\n out_center.append(center)\n out_scale.append(scale)\n\n return out_img, out_center, out_scale"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__init__", "code": "def __init__(self):\n \"\"\"\n Init method.\n \"\"\"\n super().__init__()", "docstring": "\"\"\"\n Init method.\n \"\"\"", "url": "https://github.com/Mikubill/sd-webui-controlnet/blob/56cec5b2958edf3b1807b7e7b2b1b5186dbd2f81/annotator/teed/Xsmish.py#L33-L37", "sha": "56cec5b2958edf3b1807b7e7b2b1b5186dbd2f81", "code/function": "def __init__(self):\n \n super().__init__()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "forward", "code": "def forward(self, coeffs):\n \"\"\"\n Args:\n coeffs (yl, yh): tuple of lowpass and bandpass coefficients, should\n match the format returned by DWT1DForward.\n\n Returns:\n Reconstructed input of shape :math:`(N, C_{in}, L_{in})`\n\n Note:\n Can have None for any of the highpass scales and will treat the\n values as zeros (not in an efficient way though).\n \"\"\"\n x0, highs = coeffs\n assert x0.ndim == 3, \"Can only handle 3d inputs (N, C, L)\"\n mode = mode_to_int(self.mode)\n # Do a multilevel inverse transform\n for x1 in highs[::-1]:\n if x1 is None:\n x1 = torch.zeros_like(x0)\n\n # 'Unpad' added signal\n if x0.shape[-1] > x1.shape[-1]:\n x0 = x0[..., :-1]\n x0 = SFB1D.apply(x0, x1, self.g0, self.g1, mode, self.use_amp)\n return x0", "docstring": "\"\"\"\n Args:\n coeffs (yl, yh): tuple of lowpass and bandpass coefficients, should\n match the format returned by DWT1DForward.\n\n Returns:\n Reconstructed input of shape :math:`(N, C_{in}, L_{in})`\n\n Note:\n Can have None for any of the highpass scales and will treat the\n values as zeros (not in an efficient way though).\n \"\"\"", "url": "https://github.com/thuml/Time-Series-Library/blob/4ddf869d999424b037d451a4757e36813d66a13b/layers/DWT_Decomposition.py#L224-L249", "sha": "4ddf869d999424b037d451a4757e36813d66a13b", "code/function": "def forward(self, coeffs):\n \n x0, highs = coeffs\n assert x0.ndim == 3, \"Can only handle 3d inputs (N, C, L)\"\n mode = mode_to_int(self.mode)\n # Do a multilevel inverse transform\n for x1 in highs[::-1]:\n if x1 is None:\n x1 = torch.zeros_like(x0)\n\n # 'Unpad' added signal\n if x0.shape[-1] > x1.shape[-1]:\n x0 = x0[..., :-1]\n x0 = SFB1D.apply(x0, x1, self.g0, self.g1, mode, self.use_amp)\n return x0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_auth_args", "code": "def get_auth_args():\n \"\"\"\n Generates a parser which is used to get all of the authentication information\n \"\"\"\n parser = ArgumentParser(\n description='TikTok Auth is a program which can log you into multiple accounts sequentially'\n )\n\n # authentication arguments\n 
parser.add_argument('-o', '--output', default='tmp',\n help='The output folder to save the cookies to')\n parser.add_argument('-i', '--input', help='A csv file with username and password')\n # parser.add_argument('-h', '--header', default=True,\n # help='The header of the csv file which contains the username and password')\n parser.add_argument('-u', '--username', help='Your TikTok email / username')\n parser.add_argument('-p', '--password', help='Your TikTok password')\n\n return parser.parse_args()", "docstring": "\"\"\"\n Generates a parser which is used to get all of the authentication information\n \"\"\"", "url": "https://github.com/wkaisertexas/tiktok-uploader/blob/59dc97852b70a2f13be6a48c6046f089baa1054e/src/tiktok_uploader/cli.py#L112-L129", "sha": "59dc97852b70a2f13be6a48c6046f089baa1054e", "code/function": "def get_auth_args():\n \n parser = ArgumentParser(\n description='TikTok Auth is a program which can log you into multiple accounts sequentially'\n )\n\n # authentication arguments\n parser.add_argument('-o', '--output', default='tmp',\n help='The output folder to save the cookies to')\n parser.add_argument('-i', '--input', help='A csv file with username and password')\n # parser.add_argument('-h', '--header', default=True,\n # help='The header of the csv file which contains the username and password')\n parser.add_argument('-u', '--username', help='Your TikTok email / username')\n parser.add_argument('-p', '--password', help='Your TikTok password')\n\n return parser.parse_args()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_transfer_statistics", "code": "def get_transfer_statistics(self, data=None):\n \"\"\"\n 查询转移历史统计数据\n \"\"\"\n MovieChartLabels = []\n MovieNums = []\n TvChartData = {}\n TvNums = []\n AnimeNums = []\n for statistic in self.dbhelper.get_transfer_statistics():\n if statistic[0] == \"电影\":\n MovieChartLabels.append(statistic[1])\n MovieNums.append(statistic[2])\n else:\n if not TvChartData.get(statistic[1]):\n TvChartData[statistic[1]] = {\"tv\": 0, \"anime\": 0}\n if statistic[0] == \"电视剧\":\n TvChartData[statistic[1]][\"tv\"] += statistic[2]\n elif statistic[0] == \"动漫\":\n TvChartData[statistic[1]][\"anime\"] += statistic[2]\n TvChartLabels = list(TvChartData)\n for tv_data in TvChartData.values():\n TvNums.append(tv_data.get(\"tv\"))\n AnimeNums.append(tv_data.get(\"anime\"))\n\n return {\n \"code\": 0,\n \"MovieChartLabels\": MovieChartLabels,\n \"MovieNums\": MovieNums,\n \"TvChartLabels\": TvChartLabels,\n \"TvNums\": TvNums,\n \"AnimeNums\": AnimeNums\n }", "docstring": "\"\"\"\n 查询转移历史统计数据\n \"\"\"", "url": "https://github.com/mhdpdp/nastools/blob/b9fa7cad74649fa4cd70e8bde378aa02a2d27c29/web/action.py#L3396-L3428", "sha": "b9fa7cad74649fa4cd70e8bde378aa02a2d27c29", "code/function": "def get_transfer_statistics(self, data=None):\n \n MovieChartLabels = []\n MovieNums = []\n TvChartData = {}\n TvNums = []\n AnimeNums = []\n for statistic in self.dbhelper.get_transfer_statistics():\n if statistic[0] == \"电影\":\n MovieChartLabels.append(statistic[1])\n MovieNums.append(statistic[2])\n else:\n if not TvChartData.get(statistic[1]):\n TvChartData[statistic[1]] = {\"tv\": 0, \"anime\": 0}\n if statistic[0] == \"电视剧\":\n TvChartData[statistic[1]][\"tv\"] += statistic[2]\n elif statistic[0] == \"动漫\":\n TvChartData[statistic[1]][\"anime\"] += statistic[2]\n TvChartLabels = list(TvChartData)\n for tv_data in TvChartData.values():\n TvNums.append(tv_data.get(\"tv\"))\n AnimeNums.append(tv_data.get(\"anime\"))\n\n 
return {\n \"code\": 0,\n \"MovieChartLabels\": MovieChartLabels,\n \"MovieNums\": MovieNums,\n \"TvChartLabels\": TvChartLabels,\n \"TvNums\": TvNums,\n \"AnimeNums\": AnimeNums\n }"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FilterRuleUpdate.post", "code": "@filterrule.doc(parser=parser)\n def post(self):\n \"\"\"\n 新增/修改规则\n \"\"\"\n return WebAction().api_action(cmd='add_filterrule', data=self.parser.parse_args())", "docstring": "\"\"\"\n 新增/修改规则\n \"\"\"", "url": "https://github.com/mhdpdp/nastools/blob/b9fa7cad74649fa4cd70e8bde378aa02a2d27c29/web/apiv1.py#L1803-L1808", "sha": "b9fa7cad74649fa4cd70e8bde378aa02a2d27c29", "code/function": "@filterrule.doc(parser=parser)\n def post(self):\n \n return WebAction().api_action(cmd='add_filterrule', data=self.parser.parse_args())"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DeviceType.is_luba1", "code": "@staticmethod\n def is_luba1(device_name: str, product_key: str = \"\"):\n \"\"\"Check if the given device is of type LUBA.\n\n This function determines if the device specified by 'device_name' is of\n type LUBA. If 'product_key' is provided, it is used to further identify\n the device type.\n\n Args:\n device_name (str): The name of the device.\n product_key (str?): The product key associated with the device. Defaults to \"\".\n\n Returns:\n bool: True if the device is of type LUBA, False otherwise.\n\n \"\"\"\n\n if not product_key:\n device_type = DeviceType.value_of_str(device_name)\n else:\n device_type = DeviceType.value_of_str(device_name, product_key)\n\n return device_type.get_value() == DeviceType.LUBA.get_value()", "docstring": "\"\"\"Check if the given device is of type LUBA.\n\n This function determines if the device specified by 'device_name' is of\n type LUBA. If 'product_key' is provided, it is used to further identify\n the device type.\n\n Args:\n device_name (str): The name of the device.\n product_key (str?): The product key associated with the device. 
Defaults to \"\".\n\n Returns:\n bool: True if the device is of type LUBA, False otherwise.\n\n \"\"\"", "url": "https://github.com/mikey0000/PyMammotion/blob/2c5e100b8cc50d393ff5ec460c612e1da5ec4935/pymammotion/utility/device_type.py#L127-L149", "sha": "2c5e100b8cc50d393ff5ec460c612e1da5ec4935", "code/function": "@staticmethod\n def is_luba1(device_name: str, product_key: str = \"\"):\n \n\n if not product_key:\n device_type = DeviceType.value_of_str(device_name)\n else:\n device_type = DeviceType.value_of_str(device_name, product_key)\n\n return device_type.get_value() == DeviceType.LUBA.get_value()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "update", "code": "def update(oids, hids, dists, indices, events, m):\n \"\"\"\n tracks : my results of shape {tr_id: {'id', 'im_index', 'max_iou', 'bbox'}\n num_frames : number of frames\n \"\"\"\n import pandas as pd\n asso = dict()\n hypo = dict()\n cols = ['Type', 'id', 'frame', 'tr_id', 'iou', 'w', 'h']\n events = pd.DataFrame(columns=cols)\n for i in range(num_frames):\n for tr_id, tr in self.tracks.items():\n for t in tr:\n if t['im_index'] == i:\n if t['id'] in asso.keys():\n h_o = asso[t['id']]\n if tr_id == h_o:\n TYPE = 'match'\n else:\n if h_o in hypo.keys():\n TYPE = 'switch'\n else:\n TYPE = 'ascend'\n elif tr_id == -1:\n TYPE = 'fp'\n elif tr_id in hypo.keys():\n if t['id'] not in asso.keys():\n TYPE = 'migrate'\n elif hypo[tr_id] != t['id']:\n TYPE = 'transfer'\n df = pd.DataFrame([[TYPE, t['id'], i, tr_id, t['iou'], t['bbox']\n [2]-t['bbox'][0], t['bbox'][3]-t['bbox'][1]]], columns=cols)\n\n #self.dirty_events = True\n oids = np.asarray(oids)\n oids_masked = np.zeros_like(oids, dtype=np.bool)\n hids = np.asarray(hids)\n hids_masked = np.zeros_like(hids, dtype=np.bool)\n dists = np.atleast_2d(dists).astype(float).reshape(\n oids.shape[0], hids.shape[0]).copy()\n\n if frameid is None:\n assert self.auto_id, 'auto-id is not enabled'\n if len(self._indices['FrameId']) > 0:\n frameid = self._indices['FrameId'][-1] + 1\n else:\n frameid = 0\n else:\n assert not self.auto_id, 'Cannot provide frame id when auto-id is enabled'\n\n eid = itertools.count()\n\n # 0. Record raw events\n\n no = len(oids)\n nh = len(hids)\n\n # Add a RAW event simply to ensure the frame is counted.\n indices.append(frameid, next(eid))\n events.append('RAW', np.nan, np.nan, np.nan)\n\n # There must be at least one RAW event per object and hypothesis.\n # Record all finite distances as RAW events.\n valid_i, valid_j = np.where(np.isfinite(dists))\n valid_dists = dists[valid_i, valid_j]\n for i, j, dist_ij in zip(valid_i, valid_j, valid_dists):\n indices.append(frameid, next(eid))\n events.append('RAW', oids[i], hids[j], dist_ij)\n # Add a RAW event for objects and hypotheses that were present but did\n # not overlap with anything.\n used_i = np.unique(valid_i)\n used_j = np.unique(valid_j)\n unused_i = np.setdiff1d(np.arange(no), used_i)\n unused_j = np.setdiff1d(np.arange(nh), used_j)\n for oid in oids[unused_i]:\n indices.append(frameid, next(eid))\n events.append('RAW', oid, np.nan, np.nan)\n for hid in hids[unused_j]:\n indices.append(frameid, next(eid))\n events.append('RAW', np.nan, hid, np.nan)\n\n if oids.size * hids.size > 0:\n # 1. 
Try to re-establish tracks from previous correspondences\n for i in range(oids.shape[0]):\n # No need to check oids_masked[i] here.\n if oids[i] not in self.m:\n continue\n\n hprev = self.m[oids[i]]\n j, = np.where(~hids_masked & (hids == hprev))\n if j.shape[0] == 0:\n continue\n j = j[0]\n\n if np.isfinite(dists[i, j]):\n o = oids[i]\n h = hids[j]\n oids_masked[i] = True\n hids_masked[j] = True\n self.m[oids[i]] = hids[j]\n\n indices.append(frameid, next(eid))\n events.append('MATCH', oids[i], hids[j], dists[i, j])\n self.last_match[o] = frameid\n self.hypHistory[h] = frameid\n\n # 2. Try to remaining objects/hypotheses\n dists[oids_masked, :] = np.nan\n dists[:, hids_masked] = np.nan\n\n rids, cids = linear_sum_assignment(dists)\n\n for i, j in zip(rids, cids):\n if not np.isfinite(dists[i, j]):\n continue\n\n o = oids[i]\n h = hids[j]\n is_switch = (o in self.m and\n self.m[o] != h and\n abs(frameid - self.last_occurrence[o]) <= self.max_switch_time)\n cat1 = 'SWITCH' if is_switch else 'MATCH'\n if cat1 == 'SWITCH':\n if h not in self.hypHistory:\n subcat = 'ASCEND'\n indices.append(frameid, next(eid))\n events.append(subcat, oids[i], hids[j], dists[i, j])\n # ignore the last condition temporarily\n is_transfer = (h in self.res_m and\n self.res_m[h] != o)\n # is_transfer = (h in self.res_m and\n # self.res_m[h] != o and\n # abs(frameid - self.last_occurrence[o]) <= self.max_switch_time)\n cat2 = 'TRANSFER' if is_transfer else 'MATCH'\n if cat2 == 'TRANSFER':\n if o not in self.last_match:\n subcat = 'MIGRATE'\n indices.append(frameid, next(eid))\n events.append(subcat, oids[i], hids[j], dists[i, j])\n indices.append(frameid, next(eid))\n events.append(cat2, oids[i], hids[j], dists[i, j])\n if vf != '' and (cat1 != 'MATCH' or cat2 != 'MATCH'):\n if cat1 == 'SWITCH':\n vf.write('%s %d %d %d %d %d\\n' % (\n subcat[:2], o, self.last_match[o], self.m[o], frameid, h))\n if cat2 == 'TRANSFER':\n vf.write('%s %d %d %d %d %d\\n' % (\n subcat[:2], h, self.hypHistory[h], self.res_m[h], frameid, o))\n self.hypHistory[h] = frameid\n self.last_match[o] = frameid\n indices.append(frameid, next(eid))\n events.append(cat1, oids[i], hids[j], dists[i, j])\n oids_masked[i] = True\n hids_masked[j] = True\n self.m[o] = h\n self.res_m[h] = o\n\n # 3. All remaining objects are missed\n for o in oids[~oids_masked]:\n indices.append(frameid, next(eid))\n events.append('MISS', o, np.nan, np.nan)\n if vf != '':\n vf.write('FN %d %d\\n' % (frameid, o))\n\n # 4. All remaining hypotheses are false alarms\n for h in hids[~hids_masked]:\n indices.append(frameid, next(eid))\n events.append('FP', np.nan, h, np.nan)\n if vf != '':\n vf.write('FP %d %d\\n' % (frameid, h))\n\n # 5. 
Update occurance state\n for o in oids:\n self.last_occurrence[o] = frameid\n\n return frameid", "docstring": "\"\"\"\n tracks : my results of shape {tr_id: {'id', 'im_index', 'max_iou', 'bbox'}\n num_frames : number of frames\n \"\"\"", "url": "https://github.com/dvl-tum/GHOST/blob/755a5dacfcf4dd122a4cac73061b24e9c84f3c19/src/utils.py#L119-L294", "sha": "755a5dacfcf4dd122a4cac73061b24e9c84f3c19", "code/function": "def update(oids, hids, dists, indices, events, m):\n \n import pandas as pd\n asso = dict()\n hypo = dict()\n cols = ['Type', 'id', 'frame', 'tr_id', 'iou', 'w', 'h']\n events = pd.DataFrame(columns=cols)\n for i in range(num_frames):\n for tr_id, tr in self.tracks.items():\n for t in tr:\n if t['im_index'] == i:\n if t['id'] in asso.keys():\n h_o = asso[t['id']]\n if tr_id == h_o:\n TYPE = 'match'\n else:\n if h_o in hypo.keys():\n TYPE = 'switch'\n else:\n TYPE = 'ascend'\n elif tr_id == -1:\n TYPE = 'fp'\n elif tr_id in hypo.keys():\n if t['id'] not in asso.keys():\n TYPE = 'migrate'\n elif hypo[tr_id] != t['id']:\n TYPE = 'transfer'\n df = pd.DataFrame([[TYPE, t['id'], i, tr_id, t['iou'], t['bbox']\n [2]-t['bbox'][0], t['bbox'][3]-t['bbox'][1]]], columns=cols)\n\n #self.dirty_events = True\n oids = np.asarray(oids)\n oids_masked = np.zeros_like(oids, dtype=np.bool)\n hids = np.asarray(hids)\n hids_masked = np.zeros_like(hids, dtype=np.bool)\n dists = np.atleast_2d(dists).astype(float).reshape(\n oids.shape[0], hids.shape[0]).copy()\n\n if frameid is None:\n assert self.auto_id, 'auto-id is not enabled'\n if len(self._indices['FrameId']) > 0:\n frameid = self._indices['FrameId'][-1] + 1\n else:\n frameid = 0\n else:\n assert not self.auto_id, 'Cannot provide frame id when auto-id is enabled'\n\n eid = itertools.count()\n\n # 0. Record raw events\n\n no = len(oids)\n nh = len(hids)\n\n # Add a RAW event simply to ensure the frame is counted.\n indices.append(frameid, next(eid))\n events.append('RAW', np.nan, np.nan, np.nan)\n\n # There must be at least one RAW event per object and hypothesis.\n # Record all finite distances as RAW events.\n valid_i, valid_j = np.where(np.isfinite(dists))\n valid_dists = dists[valid_i, valid_j]\n for i, j, dist_ij in zip(valid_i, valid_j, valid_dists):\n indices.append(frameid, next(eid))\n events.append('RAW', oids[i], hids[j], dist_ij)\n # Add a RAW event for objects and hypotheses that were present but did\n # not overlap with anything.\n used_i = np.unique(valid_i)\n used_j = np.unique(valid_j)\n unused_i = np.setdiff1d(np.arange(no), used_i)\n unused_j = np.setdiff1d(np.arange(nh), used_j)\n for oid in oids[unused_i]:\n indices.append(frameid, next(eid))\n events.append('RAW', oid, np.nan, np.nan)\n for hid in hids[unused_j]:\n indices.append(frameid, next(eid))\n events.append('RAW', np.nan, hid, np.nan)\n\n if oids.size * hids.size > 0:\n # 1. Try to re-establish tracks from previous correspondences\n for i in range(oids.shape[0]):\n # No need to check oids_masked[i] here.\n if oids[i] not in self.m:\n continue\n\n hprev = self.m[oids[i]]\n j, = np.where(~hids_masked & (hids == hprev))\n if j.shape[0] == 0:\n continue\n j = j[0]\n\n if np.isfinite(dists[i, j]):\n o = oids[i]\n h = hids[j]\n oids_masked[i] = True\n hids_masked[j] = True\n self.m[oids[i]] = hids[j]\n\n indices.append(frameid, next(eid))\n events.append('MATCH', oids[i], hids[j], dists[i, j])\n self.last_match[o] = frameid\n self.hypHistory[h] = frameid\n\n # 2. 
Try to remaining objects/hypotheses\n dists[oids_masked, :] = np.nan\n dists[:, hids_masked] = np.nan\n\n rids, cids = linear_sum_assignment(dists)\n\n for i, j in zip(rids, cids):\n if not np.isfinite(dists[i, j]):\n continue\n\n o = oids[i]\n h = hids[j]\n is_switch = (o in self.m and\n self.m[o] != h and\n abs(frameid - self.last_occurrence[o]) <= self.max_switch_time)\n cat1 = 'SWITCH' if is_switch else 'MATCH'\n if cat1 == 'SWITCH':\n if h not in self.hypHistory:\n subcat = 'ASCEND'\n indices.append(frameid, next(eid))\n events.append(subcat, oids[i], hids[j], dists[i, j])\n # ignore the last condition temporarily\n is_transfer = (h in self.res_m and\n self.res_m[h] != o)\n # is_transfer = (h in self.res_m and\n # self.res_m[h] != o and\n # abs(frameid - self.last_occurrence[o]) <= self.max_switch_time)\n cat2 = 'TRANSFER' if is_transfer else 'MATCH'\n if cat2 == 'TRANSFER':\n if o not in self.last_match:\n subcat = 'MIGRATE'\n indices.append(frameid, next(eid))\n events.append(subcat, oids[i], hids[j], dists[i, j])\n indices.append(frameid, next(eid))\n events.append(cat2, oids[i], hids[j], dists[i, j])\n if vf != '' and (cat1 != 'MATCH' or cat2 != 'MATCH'):\n if cat1 == 'SWITCH':\n vf.write('%s %d %d %d %d %d\\n' % (\n subcat[:2], o, self.last_match[o], self.m[o], frameid, h))\n if cat2 == 'TRANSFER':\n vf.write('%s %d %d %d %d %d\\n' % (\n subcat[:2], h, self.hypHistory[h], self.res_m[h], frameid, o))\n self.hypHistory[h] = frameid\n self.last_match[o] = frameid\n indices.append(frameid, next(eid))\n events.append(cat1, oids[i], hids[j], dists[i, j])\n oids_masked[i] = True\n hids_masked[j] = True\n self.m[o] = h\n self.res_m[h] = o\n\n # 3. All remaining objects are missed\n for o in oids[~oids_masked]:\n indices.append(frameid, next(eid))\n events.append('MISS', o, np.nan, np.nan)\n if vf != '':\n vf.write('FN %d %d\\n' % (frameid, o))\n\n # 4. All remaining hypotheses are false alarms\n for h in hids[~hids_masked]:\n indices.append(frameid, next(eid))\n events.append('FP', np.nan, h, np.nan)\n if vf != '':\n vf.write('FP %d %d\\n' % (frameid, h))\n\n # 5. 
Update occurance state\n for o in oids:\n self.last_occurrence[o] = frameid\n\n return frameid"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "checkpoint_filter_fn", "code": "def checkpoint_filter_fn(state_dict, model):\n \"\"\" convert patch embedding weight from manual patchify + linear proj to conv\"\"\"\n out_dict = {}\n if 'model' in state_dict:\n # For deit models\n state_dict = state_dict['model']\n for k, v in state_dict.items():\n if 'patch_embed.proj.weight' in k and len(v.shape) < 4:\n # For old models that I trained prior to conv based patchification\n O, I, H, W = model.patch_embed.proj.weight.shape\n v = v.reshape(O, -1, H, W)\n elif k == 'pos_embed' and v.shape != model.pos_embed.shape:\n # To resize pos embedding when using model at different size from pretrained weights\n v = resize_pos_embed(v, model.pos_embed, getattr(model, 'num_tokens', 1),\n model.patch_embed.grid_size)\n out_dict[k] = v\n return out_dict", "docstring": "\"\"\" convert patch embedding weight from manual patchify + linear proj to conv\"\"\"", "url": "https://github.com/HazyResearch/safari/blob/02220c69d247e5473616cd053a443ad99fd2559b/src/models/baselines/vit_all.py#L341-L357", "sha": "02220c69d247e5473616cd053a443ad99fd2559b", "code/function": "def checkpoint_filter_fn(state_dict, model):\n \n out_dict = {}\n if 'model' in state_dict:\n # For deit models\n state_dict = state_dict['model']\n for k, v in state_dict.items():\n if 'patch_embed.proj.weight' in k and len(v.shape) < 4:\n # For old models that I trained prior to conv based patchification\n O, I, H, W = model.patch_embed.proj.weight.shape\n v = v.reshape(O, -1, H, W)\n elif k == 'pos_embed' and v.shape != model.pos_embed.shape:\n # To resize pos embedding when using model at different size from pretrained weights\n v = resize_pos_embed(v, model.pos_embed, getattr(model, 'num_tokens', 1),\n model.patch_embed.grid_size)\n out_dict[k] = v\n return out_dict"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "setup_filters", "code": "def setup_filters(self, filter_cls, filter_args): \n \"Initializes the explicit and implicit filters\"\n assert self.order >= 2, f'Order must be at least 2, (got {self.order})'\n total_width = self.d_model * self.inner_factor * (self.order + 1)\n \n self.short_filter = nn.Conv1d(\n in_channels=total_width, \n out_channels=total_width, \n kernel_size=self.short_filter_order, \n groups=total_width, \n padding=self.short_filter_order - 1\n )\n \n filter_cls = instantiate(registry.layer, filter_cls, partial=True)\n \n self.filter_fn = filter_cls(\n self.head_dim * self.inner_factor * (self.order - 1), \n order=self.filter_order, \n seq_len=self.l_max,\n channels=1, \n dropout=self.filter_dropout, \n **filter_args\n ) \n if self.jit_filter: self.filter_fn = torch.jit.script(self.filter_fn, self.L)", "docstring": "\"Initializes the explicit and implicit filters\"", "url": "https://github.com/HazyResearch/safari/blob/02220c69d247e5473616cd053a443ad99fd2559b/src/models/sequence/hyena.py#L280-L303", "sha": "02220c69d247e5473616cd053a443ad99fd2559b", "code/function": "def setup_filters(self, filter_cls, filter_args): \n \n assert self.order >= 2, f'Order must be at least 2, (got {self.order})'\n total_width = self.d_model * self.inner_factor * (self.order + 1)\n \n self.short_filter = nn.Conv1d(\n in_channels=total_width, \n out_channels=total_width, \n kernel_size=self.short_filter_order, \n groups=total_width, \n padding=self.short_filter_order - 1\n )\n \n 
filter_cls = instantiate(registry.layer, filter_cls, partial=True)\n \n self.filter_fn = filter_cls(\n self.head_dim * self.inner_factor * (self.order - 1), \n order=self.filter_order, \n seq_len=self.l_max,\n channels=1, \n dropout=self.filter_dropout, \n **filter_args\n ) \n if self.jit_filter: self.filter_fn = torch.jit.script(self.filter_fn, self.L)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "custom_weight_dir_reduce_loss", "code": "@mmcv.jit(derivate=True, coderize=True)\ndef custom_weight_dir_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None):\n \"\"\"Apply element-wise weight and reduce loss.\n\n Args:\n loss (Tensor): num_sample, num_dir\n weight (Tensor): Element-wise weights.\n reduction (str): Same as built-in losses of PyTorch.\n avg_factor (float): Average factor when computing the mean of losses.\n\n Returns:\n Tensor: Processed loss values.\n \"\"\"\n # if weight is specified, apply element-wise weight\n if weight is not None:\n loss = loss * weight\n\n # if avg_factor is not specified, just reduce the loss\n if avg_factor is None:\n raise ValueError('avg_factor should not be none for OrderedPtsL1Loss')\n # loss = reduce_loss(loss, reduction)\n else:\n # if reduction is mean, then average the loss by avg_factor\n if reduction == 'mean':\n # import pdb;pdb.set_trace()\n # loss = loss.permute(1,0,2,3).contiguous()\n loss = loss.sum()\n loss = loss / avg_factor\n # if reduction is 'none', then do nothing, otherwise raise an error\n elif reduction != 'none':\n raise ValueError('avg_factor can not be used with reduction=\"sum\"')\n return loss", "docstring": "\"\"\"Apply element-wise weight and reduce loss.\n\n Args:\n loss (Tensor): num_sample, num_dir\n weight (Tensor): Element-wise weights.\n reduction (str): Same as built-in losses of PyTorch.\n avg_factor (float): Average factor when computing the mean of losses.\n\n Returns:\n Tensor: Processed loss values.\n \"\"\"", "url": "https://github.com/hustvl/VAD/blob/70bb364aa3f33316960da06053c0d168628fb15f/projects/mmdet3d_plugin/VAD/utils/CD_loss.py#L33-L64", "sha": "70bb364aa3f33316960da06053c0d168628fb15f", "code/function": "@mmcv.jit(derivate=True, coderize=True)\ndef custom_weight_dir_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None):\n \n # if weight is specified, apply element-wise weight\n if weight is not None:\n loss = loss * weight\n\n # if avg_factor is not specified, just reduce the loss\n if avg_factor is None:\n raise ValueError('avg_factor should not be none for OrderedPtsL1Loss')\n # loss = reduce_loss(loss, reduction)\n else:\n # if reduction is mean, then average the loss by avg_factor\n if reduction == 'mean':\n # import pdb;pdb.set_trace()\n # loss = loss.permute(1,0,2,3).contiguous()\n loss = loss.sum()\n loss = loss / avg_factor\n # if reduction is 'none', then do nothing, otherwise raise an error\n elif reduction != 'none':\n raise ValueError('avg_factor can not be used with reduction=\"sum\"')\n return loss"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "add_model_config", "code": "def add_model_config(path):\n \"\"\"add model config path or file and update registry\"\"\"\n if not isinstance(path, Path):\n path = Path(path)\n _MODEL_CONFIG_PATHS.append(path)\n _rescan_model_configs()", "docstring": "\"\"\"add model config path or file and update registry\"\"\"", "url": 
"https://github.com/deepglint/unicom/blob/7e503c908a1b9deb6cd2c2d6287500b95233884f/downstream/llava/model/multimodal_encoder/dev_eva_clip/eva_clip/factory.py#L62-L67", "sha": "7e503c908a1b9deb6cd2c2d6287500b95233884f", "code/function": "def add_model_config(path):\n \n if not isinstance(path, Path):\n path = Path(path)\n _MODEL_CONFIG_PATHS.append(path)\n _rescan_model_configs()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WebAction.set_config_value", "code": "@staticmethod\n def set_config_value(cfg, cfg_key, cfg_value):\n \"\"\"\n 根据Key设置配置值\n \"\"\"\n # 密码\n if cfg_key == \"app.login_password\":\n if cfg_value and not cfg_value.startswith(\"[hash]\"):\n cfg['app']['login_password'] = \"[hash]%s\" % generate_password_hash(\n cfg_value)\n else:\n cfg['app']['login_password'] = cfg_value or \"password\"\n return cfg\n # 代理\n if cfg_key == \"app.proxies\":\n if cfg_value:\n if not cfg_value.startswith(\"http\") and not cfg_value.startswith(\"sock\"):\n cfg['app']['proxies'] = {\n \"https\": \"http://%s\" % cfg_value, \"http\": \"http://%s\" % cfg_value}\n else:\n cfg['app']['proxies'] = {\"https\": \"%s\" %\n cfg_value, \"http\": \"%s\" % cfg_value}\n else:\n cfg['app']['proxies'] = {\"https\": None, \"http\": None}\n return cfg\n # 豆瓣用户列表\n if cfg_key == \"douban.users\":\n vals = cfg_value.split(\",\")\n cfg['douban']['users'] = vals\n return cfg\n # 索引器\n if cfg_key == \"jackett.indexers\":\n vals = cfg_value.split(\"\\n\")\n cfg['jackett']['indexers'] = vals\n return cfg\n # 最大支持三层赋值\n keys = cfg_key.split(\".\")\n if keys:\n if len(keys) == 1:\n cfg[keys[0]] = cfg_value\n elif len(keys) == 2:\n if not cfg.get(keys[0]):\n cfg[keys[0]] = {}\n cfg[keys[0]][keys[1]] = cfg_value\n elif len(keys) == 3:\n if cfg.get(keys[0]):\n if not cfg[keys[0]].get(keys[1]) or isinstance(cfg[keys[0]][keys[1]], str):\n cfg[keys[0]][keys[1]] = {}\n cfg[keys[0]][keys[1]][keys[2]] = cfg_value\n else:\n cfg[keys[0]] = {}\n cfg[keys[0]][keys[1]] = {}\n cfg[keys[0]][keys[1]][keys[2]] = cfg_value\n\n return cfg", "docstring": "\"\"\"\n 根据Key设置配置值\n \"\"\"", "url": "https://github.com/receyuki/nas-tools/blob/e3a43d4f0896db49de02e9a9201ef2e5877af56f/web/action.py#L299-L353", "sha": "e3a43d4f0896db49de02e9a9201ef2e5877af56f", "code/function": "@staticmethod\n def set_config_value(cfg, cfg_key, cfg_value):\n \n # 密码\n if cfg_key == \"app.login_password\":\n if cfg_value and not cfg_value.startswith(\"[hash]\"):\n cfg['app']['login_password'] = \"[hash]%s\" % generate_password_hash(\n cfg_value)\n else:\n cfg['app']['login_password'] = cfg_value or \"password\"\n return cfg\n # 代理\n if cfg_key == \"app.proxies\":\n if cfg_value:\n if not cfg_value.startswith(\"http\") and not cfg_value.startswith(\"sock\"):\n cfg['app']['proxies'] = {\n \"https\": \"http://%s\" % cfg_value, \"http\": \"http://%s\" % cfg_value}\n else:\n cfg['app']['proxies'] = {\"https\": \"%s\" %\n cfg_value, \"http\": \"%s\" % cfg_value}\n else:\n cfg['app']['proxies'] = {\"https\": None, \"http\": None}\n return cfg\n # 豆瓣用户列表\n if cfg_key == \"douban.users\":\n vals = cfg_value.split(\",\")\n cfg['douban']['users'] = vals\n return cfg\n # 索引器\n if cfg_key == \"jackett.indexers\":\n vals = cfg_value.split(\"\\n\")\n cfg['jackett']['indexers'] = vals\n return cfg\n # 最大支持三层赋值\n keys = cfg_key.split(\".\")\n if keys:\n if len(keys) == 1:\n cfg[keys[0]] = cfg_value\n elif len(keys) == 2:\n if not cfg.get(keys[0]):\n cfg[keys[0]] = {}\n cfg[keys[0]][keys[1]] = cfg_value\n elif len(keys) == 3:\n if 
cfg.get(keys[0]):\n if not cfg[keys[0]].get(keys[1]) or isinstance(cfg[keys[0]][keys[1]], str):\n cfg[keys[0]][keys[1]] = {}\n cfg[keys[0]][keys[1]][keys[2]] = cfg_value\n else:\n cfg[keys[0]] = {}\n cfg[keys[0]][keys[1]] = {}\n cfg[keys[0]][keys[1]][keys[2]] = cfg_value\n\n return cfg"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "post", "code": "async def post(self) -> \"MultiDictProxy[Union[str, bytes, FileField]]\":\n \"\"\"Return POST parameters.\"\"\"\n if self._post is not None:\n return self._post\n if self._method not in self.POST_METHODS:\n self._post = MultiDictProxy(MultiDict())\n return self._post\n\n content_type = self.content_type\n if content_type not in (\n \"\",\n \"application/x-www-form-urlencoded\",\n \"multipart/form-data\",\n ):\n self._post = MultiDictProxy(MultiDict())\n return self._post\n\n out: MultiDict[Union[str, bytes, FileField]] = MultiDict()\n\n if content_type == \"multipart/form-data\":\n multipart = await self.multipart()\n max_size = self._client_max_size\n\n field = await multipart.next()\n while field is not None:\n size = 0\n field_ct = field.headers.get(hdrs.CONTENT_TYPE)\n\n if isinstance(field, BodyPartReader):\n assert field.name is not None\n\n # Note that according to RFC 7578, the Content-Type header\n # is optional, even for files, so we can't assume it's\n # present.\n # https://tools.ietf.org/html/rfc7578#section-4.4\n if field.filename:\n # store file in temp file\n tmp = tempfile.TemporaryFile()\n chunk = await field.read_chunk(size=2**16)\n while chunk:\n chunk = field.decode(chunk)\n tmp.write(chunk)\n size += len(chunk)\n if 0 < max_size < size:\n tmp.close()\n raise HTTPRequestEntityTooLarge(\n max_size=max_size, actual_size=size\n )\n chunk = await field.read_chunk(size=2**16)\n tmp.seek(0)\n\n if field_ct is None:\n field_ct = \"application/octet-stream\"\n\n ff = FileField(\n field.name,\n field.filename,\n cast(io.BufferedReader, tmp),\n field_ct,\n field.headers,\n )\n out.add(field.name, ff)\n else:\n # deal with ordinary data\n value = await field.read(decode=True)\n if field_ct is None or field_ct.startswith(\"text/\"):\n charset = field.get_charset(default=\"utf-8\")\n out.add(field.name, value.decode(charset))\n else:\n out.add(field.name, value)\n size += len(value)\n if 0 < max_size < size:\n raise HTTPRequestEntityTooLarge(\n max_size=max_size, actual_size=size\n )\n else:\n raise ValueError(\n \"To decode nested multipart you need \" \"to use custom reader\",\n )\n\n field = await multipart.next()\n else:\n data = await self.read()\n if data:\n charset = self.charset or \"utf-8\"\n out.extend(\n parse_qsl(\n data.rstrip().decode(charset),\n keep_blank_values=True,\n encoding=charset,\n )\n )\n\n self._post = MultiDictProxy(out)\n return self._post", "docstring": "\"\"\"Return POST parameters.\"\"\"", "url": "https://github.com/chrislemke/ChatFred/blob/4356986c2cc3eaf57b5329774c9e593c01554789/workflow/src/libs/aiohttp/web_request.py#L677-L771", "sha": "4356986c2cc3eaf57b5329774c9e593c01554789", "code/function": "async def post(self) -> \"MultiDictProxy[Union[str, bytes, FileField]]\":\n \n if self._post is not None:\n return self._post\n if self._method not in self.POST_METHODS:\n self._post = MultiDictProxy(MultiDict())\n return self._post\n\n content_type = self.content_type\n if content_type not in (\n \"\",\n \"application/x-www-form-urlencoded\",\n \"multipart/form-data\",\n ):\n self._post = MultiDictProxy(MultiDict())\n return self._post\n\n out: MultiDict[Union[str, bytes, 
FileField]] = MultiDict()\n\n if content_type == \"multipart/form-data\":\n multipart = await self.multipart()\n max_size = self._client_max_size\n\n field = await multipart.next()\n while field is not None:\n size = 0\n field_ct = field.headers.get(hdrs.CONTENT_TYPE)\n\n if isinstance(field, BodyPartReader):\n assert field.name is not None\n\n # Note that according to RFC 7578, the Content-Type header\n # is optional, even for files, so we can't assume it's\n # present.\n # https://tools.ietf.org/html/rfc7578#section-4.4\n if field.filename:\n # store file in temp file\n tmp = tempfile.TemporaryFile()\n chunk = await field.read_chunk(size=2**16)\n while chunk:\n chunk = field.decode(chunk)\n tmp.write(chunk)\n size += len(chunk)\n if 0 < max_size < size:\n tmp.close()\n raise HTTPRequestEntityTooLarge(\n max_size=max_size, actual_size=size\n )\n chunk = await field.read_chunk(size=2**16)\n tmp.seek(0)\n\n if field_ct is None:\n field_ct = \"application/octet-stream\"\n\n ff = FileField(\n field.name,\n field.filename,\n cast(io.BufferedReader, tmp),\n field_ct,\n field.headers,\n )\n out.add(field.name, ff)\n else:\n # deal with ordinary data\n value = await field.read(decode=True)\n if field_ct is None or field_ct.startswith(\"text/\"):\n charset = field.get_charset(default=\"utf-8\")\n out.add(field.name, value.decode(charset))\n else:\n out.add(field.name, value)\n size += len(value)\n if 0 < max_size < size:\n raise HTTPRequestEntityTooLarge(\n max_size=max_size, actual_size=size\n )\n else:\n raise ValueError(\n \"To decode nested multipart you need \" \"to use custom reader\",\n )\n\n field = await multipart.next()\n else:\n data = await self.read()\n if data:\n charset = self.charset or \"utf-8\"\n out.extend(\n parse_qsl(\n data.rstrip().decode(charset),\n keep_blank_values=True,\n encoding=charset,\n )\n )\n\n self._post = MultiDictProxy(out)\n return self._post"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "add_put", "code": "def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:\n \"\"\"Shortcut for add_route with method PUT.\"\"\"\n return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)", "docstring": "\"\"\"Shortcut for add_route with method PUT.\"\"\"", "url": "https://github.com/chrislemke/ChatFred/blob/4356986c2cc3eaf57b5329774c9e593c01554789/workflow/src/libs/aiohttp/web_urldispatcher.py#L1168-L1170", "sha": "4356986c2cc3eaf57b5329774c9e593c01554789", "code/function": "def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:\n \n return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "create_memory_object_stream", "code": "def create_memory_object_stream(\n max_buffer_size: float = 0, item_type: type[T_Item] | None = None\n) -> tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]:\n \"\"\"\n Create a memory object stream.\n\n :param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking\n :param item_type: type of item, for marking the streams with the right generic type for\n static typing (not used at run time)\n :return: a tuple of (send stream, receive stream)\n\n \"\"\"\n if max_buffer_size != math.inf and not isinstance(max_buffer_size, int):\n raise ValueError(\"max_buffer_size must be either an integer or math.inf\")\n if max_buffer_size < 0:\n raise ValueError(\"max_buffer_size cannot be negative\")\n\n state: MemoryObjectStreamState = 
MemoryObjectStreamState(max_buffer_size)\n return MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)", "docstring": "\"\"\"\n Create a memory object stream.\n\n :param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking\n :param item_type: type of item, for marking the streams with the right generic type for\n static typing (not used at run time)\n :return: a tuple of (send stream, receive stream)\n\n \"\"\"", "url": "https://github.com/chrislemke/ChatFred/blob/4356986c2cc3eaf57b5329774c9e593c01554789/workflow/src/libs/anyio/_core/_streams.py#L29-L47", "sha": "4356986c2cc3eaf57b5329774c9e593c01554789", "code/function": "def create_memory_object_stream(\n max_buffer_size: float = 0, item_type: type[T_Item] | None = None\n) -> tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]:\n \n if max_buffer_size != math.inf and not isinstance(max_buffer_size, int):\n raise ValueError(\"max_buffer_size must be either an integer or math.inf\")\n if max_buffer_size < 0:\n raise ValueError(\"max_buffer_size cannot be negative\")\n\n state: MemoryObjectStreamState = MemoryObjectStreamState(max_buffer_size)\n return MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_colors_to_code", "code": "def _colors_to_code(self, fg_color: str, bg_color: str) -> Iterable[str]:\n \"\"\"\n Return a tuple with the vt100 values that represent this color.\n \"\"\"\n # When requesting ANSI colors only, and both fg/bg color were converted\n # to ANSI, ensure that the foreground and background color are not the\n # same. (Unless they were explicitly defined to be the same color.)\n fg_ansi = \"\"\n\n def get(color: str, bg: bool) -> list[int]:\n nonlocal fg_ansi\n\n table = BG_ANSI_COLORS if bg else FG_ANSI_COLORS\n\n if not color or self.color_depth == ColorDepth.DEPTH_1_BIT:\n return []\n\n # 16 ANSI colors. (Given by name.)\n elif color in table:\n return [table[color]]\n\n # RGB colors. (Defined as 'ffffff'.)\n else:\n try:\n rgb = self._color_name_to_rgb(color)\n except ValueError:\n return []\n\n # When only 16 colors are supported, use that.\n if self.color_depth == ColorDepth.DEPTH_4_BIT:\n if bg: # Background.\n if fg_color != bg_color:\n exclude = [fg_ansi]\n else:\n exclude = []\n code, name = _16_bg_colors.get_code(rgb, exclude=exclude)\n return [code]\n else: # Foreground.\n code, name = _16_fg_colors.get_code(rgb)\n fg_ansi = name\n return [code]\n\n # True colors. (Only when this feature is enabled.)\n elif self.color_depth == ColorDepth.DEPTH_24_BIT:\n r, g, b = rgb\n return [(48 if bg else 38), 2, r, g, b]\n\n # 256 RGB colors.\n else:\n return [(48 if bg else 38), 5, _256_colors[rgb]]\n\n result: list[int] = []\n result.extend(get(fg_color, False))\n result.extend(get(bg_color, True))\n\n return map(str, result)", "docstring": "\"\"\"\n Return a tuple with the vt100 values that represent this color.\n \"\"\"", "url": "https://github.com/chrislemke/ChatFred/blob/4356986c2cc3eaf57b5329774c9e593c01554789/workflow/src/libs/prompt_toolkit/output/vt100.py#L319-L374", "sha": "4356986c2cc3eaf57b5329774c9e593c01554789", "code/function": "def _colors_to_code(self, fg_color: str, bg_color: str) -> Iterable[str]:\n \n # When requesting ANSI colors only, and both fg/bg color were converted\n # to ANSI, ensure that the foreground and background color are not the\n # same. 
(Unless they were explicitly defined to be the same color.)\n fg_ansi = \"\"\n\n def get(color: str, bg: bool) -> list[int]:\n nonlocal fg_ansi\n\n table = BG_ANSI_COLORS if bg else FG_ANSI_COLORS\n\n if not color or self.color_depth == ColorDepth.DEPTH_1_BIT:\n return []\n\n # 16 ANSI colors. (Given by name.)\n elif color in table:\n return [table[color]]\n\n # RGB colors. (Defined as 'ffffff'.)\n else:\n try:\n rgb = self._color_name_to_rgb(color)\n except ValueError:\n return []\n\n # When only 16 colors are supported, use that.\n if self.color_depth == ColorDepth.DEPTH_4_BIT:\n if bg: # Background.\n if fg_color != bg_color:\n exclude = [fg_ansi]\n else:\n exclude = []\n code, name = _16_bg_colors.get_code(rgb, exclude=exclude)\n return [code]\n else: # Foreground.\n code, name = _16_fg_colors.get_code(rgb)\n fg_ansi = name\n return [code]\n\n # True colors. (Only when this feature is enabled.)\n elif self.color_depth == ColorDepth.DEPTH_24_BIT:\n r, g, b = rgb\n return [(48 if bg else 38), 2, r, g, b]\n\n # 256 RGB colors.\n else:\n return [(48 if bg else 38), 5, _256_colors[rgb]]\n\n result: list[int] = []\n result.extend(get(fg_color, False))\n result.extend(get(bg_color, True))\n\n return map(str, result)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "extract_from_urllib3", "code": "def extract_from_urllib3():\n \"\"\"\n Undo monkey-patching by :func:`inject_into_urllib3`.\n \"\"\"\n util.SSLContext = orig_util_SSLContext\n util.ssl_.SSLContext = orig_util_SSLContext\n util.HAS_SNI = orig_util_HAS_SNI\n util.ssl_.HAS_SNI = orig_util_HAS_SNI\n util.IS_SECURETRANSPORT = False\n util.ssl_.IS_SECURETRANSPORT = False", "docstring": "\"\"\"\n Undo monkey-patching by :func:`inject_into_urllib3`.\n \"\"\"", "url": "https://github.com/chrislemke/ChatFred/blob/4356986c2cc3eaf57b5329774c9e593c01554789/workflow/src/libs/urllib3/contrib/securetransport.py#L201-L210", "sha": "4356986c2cc3eaf57b5329774c9e593c01554789", "code/function": "def extract_from_urllib3():\n \n util.SSLContext = orig_util_SSLContext\n util.ssl_.SSLContext = orig_util_SSLContext\n util.HAS_SNI = orig_util_HAS_SNI\n util.ssl_.HAS_SNI = orig_util_HAS_SNI\n util.IS_SECURETRANSPORT = False\n util.ssl_.IS_SECURETRANSPORT = False"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Url.hostname", "code": "@property\n def hostname(self):\n \"\"\"For backwards-compatibility with urlparse. We're nice like that.\"\"\"\n return self.host", "docstring": "\"\"\"For backwards-compatibility with urlparse. 
We're nice like that.\"\"\"", "url": "https://github.com/chrislemke/ChatFred/blob/4356986c2cc3eaf57b5329774c9e593c01554789/workflow/src/libs/urllib3/util/url.py#L109-L112", "sha": "4356986c2cc3eaf57b5329774c9e593c01554789", "code/function": "@property\n def hostname(self):\n \n return self.host"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "test_decomposition_nested", "code": "def test_decomposition_nested(self):\n \"\"\"Tests decompositions of nested controlled operations\"\"\"\n\n ctrl_op = C_ctrl(C_ctrl(lambda: qml.RZ(0.123, wires=0), control=1), control=2)()\n expected = [\n qml.ops.Controlled(qml.RZ(0.123, wires=0), control_wires=[1, 2]),\n ]\n assert ctrl_op.decomposition() == expected", "docstring": "\"\"\"Tests decompositions of nested controlled operations\"\"\"", "url": "https://github.com/PennyLaneAI/catalyst/blob/729d468ad1bec692242c6b20560a2b9922debb31/frontend/test/pytest/test_quantum_control.py#L1654-L1661", "sha": "729d468ad1bec692242c6b20560a2b9922debb31", "code/function": "def test_decomposition_nested(self):\n \n\n ctrl_op = C_ctrl(C_ctrl(lambda: qml.RZ(0.123, wires=0), control=1), control=2)()\n expected = [\n qml.ops.Controlled(qml.RZ(0.123, wires=0), control_wires=[1, 2]),\n ]\n assert ctrl_op.decomposition() == expected"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "test_pattern_matching_optimization", "code": "@pytest.mark.xfail(\n reason=\"QJIT fails with ValueError: Eagerly computing the adjoint (lazy=False) is only supported on single operators.\"\n)\ndef test_pattern_matching_optimization(backend):\n \"\"\"Test pattern_matching_optimization\"\"\"\n\n def qnode_builder(device_name):\n \"\"\"Builder\"\"\"\n\n ops = [qml.S(0), qml.S(0), qml.Z(0)]\n pattern = qml.tape.QuantumTape(ops)\n\n @partial(qml.transforms.pattern_matching_optimization, pattern_tapes=[pattern])\n @qml.qnode(qml.device(device_name, wires=5))\n def qfunc():\n qml.S(wires=0)\n qml.Z(0)\n qml.S(wires=1)\n qml.CZ(wires=[0, 1])\n qml.S(wires=1)\n qml.S(wires=2)\n qml.CZ(wires=[1, 2])\n qml.S(wires=2)\n return qml.expval(qml.X(0))\n\n return qfunc\n\n qnode_control = qnode_builder(\"default.qubit\")\n qnode_backend = qnode_builder(backend)\n\n jax_jit = jax.jit(qnode_control)\n compiled = qjit(qnode_backend)\n\n expected = jax_jit()\n observed = compiled()\n _, expected_shape = jax.tree_util.tree_flatten(expected)\n _, observed_shape = jax.tree_util.tree_flatten(observed)\n\n assert np.allclose(expected, observed)\n assert expected_shape == observed_shape", "docstring": "\"\"\"Test pattern_matching_optimization\"\"\"", "url": "https://github.com/PennyLaneAI/catalyst/blob/729d468ad1bec692242c6b20560a2b9922debb31/frontend/test/pytest/test_transform.py#L1340-L1379", "sha": "729d468ad1bec692242c6b20560a2b9922debb31", "code/function": "@pytest.mark.xfail(\n reason=\"QJIT fails with ValueError: Eagerly computing the adjoint (lazy=False) is only supported on single operators.\"\n)\ndef test_pattern_matching_optimization(backend):\n \n\n def qnode_builder(device_name):\n \"\"\"Builder\"\"\"\n\n ops = [qml.S(0), qml.S(0), qml.Z(0)]\n pattern = qml.tape.QuantumTape(ops)\n\n @partial(qml.transforms.pattern_matching_optimization, pattern_tapes=[pattern])\n @qml.qnode(qml.device(device_name, wires=5))\n def qfunc():\n qml.S(wires=0)\n qml.Z(0)\n qml.S(wires=1)\n qml.CZ(wires=[0, 1])\n qml.S(wires=1)\n qml.S(wires=2)\n qml.CZ(wires=[1, 2])\n qml.S(wires=2)\n return qml.expval(qml.X(0))\n\n return qfunc\n\n qnode_control = 
qnode_builder(\"default.qubit\")\n qnode_backend = qnode_builder(backend)\n\n jax_jit = jax.jit(qnode_control)\n compiled = qjit(qnode_backend)\n\n expected = jax_jit()\n observed = compiled()\n _, expected_shape = jax.tree_util.tree_flatten(expected)\n _, observed_shape = jax.tree_util.tree_flatten(observed)\n\n assert np.allclose(expected, observed)\n assert expected_shape == observed_shape"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "forward", "code": "def forward(self,\n query,\n key,\n value,\n query_pos=None,\n key_pos=None,\n attn_masks=None,\n query_key_padding_mask=None,\n key_padding_mask=None,\n **kwargs):\n \"\"\"Forward function for `TransformerCoder`.\n\n Args:\n query (Tensor): Input query with shape\n `(num_queries, bs, embed_dims)`.\n key (Tensor): The key tensor with shape\n `(num_keys, bs, embed_dims)`.\n value (Tensor): The value tensor with shape\n `(num_keys, bs, embed_dims)`.\n query_pos (Tensor): The positional encoding for `query`.\n Default: None.\n key_pos (Tensor): The positional encoding for `key`.\n Default: None.\n attn_masks (List[Tensor], optional): Each element is 2D Tensor\n which is used in calculation of corresponding attention in\n operation_order. Default: None.\n query_key_padding_mask (Tensor): ByteTensor for `query`, with\n shape [bs, num_queries]. Only used in self-attention\n Default: None.\n key_padding_mask (Tensor): ByteTensor for `query`, with\n shape [bs, num_keys]. Default: None.\n\n Returns:\n Tensor: results with shape [num_queries, bs, embed_dims].\n \"\"\"\n for layer in self.layers:\n query = layer(\n query,\n key,\n value,\n query_pos=query_pos,\n key_pos=key_pos,\n attn_masks=attn_masks,\n query_key_padding_mask=query_key_padding_mask,\n key_padding_mask=key_padding_mask,\n **kwargs)\n return query", "docstring": "\"\"\"Forward function for `TransformerCoder`.\n\n Args:\n query (Tensor): Input query with shape\n `(num_queries, bs, embed_dims)`.\n key (Tensor): The key tensor with shape\n `(num_keys, bs, embed_dims)`.\n value (Tensor): The value tensor with shape\n `(num_keys, bs, embed_dims)`.\n query_pos (Tensor): The positional encoding for `query`.\n Default: None.\n key_pos (Tensor): The positional encoding for `key`.\n Default: None.\n attn_masks (List[Tensor], optional): Each element is 2D Tensor\n which is used in calculation of corresponding attention in\n operation_order. Default: None.\n query_key_padding_mask (Tensor): ByteTensor for `query`, with\n shape [bs, num_queries]. Only used in self-attention\n Default: None.\n key_padding_mask (Tensor): ByteTensor for `query`, with\n shape [bs, num_keys]. 
Default: None.\n\n Returns:\n Tensor: results with shape [num_queries, bs, embed_dims].\n \"\"\"", "url": "https://github.com/HighCWu/ControlLoRA/blob/a6891215fc587af326ab1234718491741a5c2015/annotator/uniformer/mmcv/cnn/bricks/transformer.py#L549-L595", "sha": "a6891215fc587af326ab1234718491741a5c2015", "code/function": "def forward(self,\n query,\n key,\n value,\n query_pos=None,\n key_pos=None,\n attn_masks=None,\n query_key_padding_mask=None,\n key_padding_mask=None,\n **kwargs):\n \n for layer in self.layers:\n query = layer(\n query,\n key,\n value,\n query_pos=query_pos,\n key_pos=key_pos,\n attn_masks=attn_masks,\n query_key_padding_mask=query_key_padding_mask,\n key_padding_mask=key_padding_mask,\n **kwargs)\n return query"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CausalLmModelBuilder.v1_from_args", "code": "@classmethod\n def v1_from_args(\n cls, vocab_size: int, *, num_layers: int, hidden_dim: int, num_heads: int\n ) -> CausalLM:\n \"\"\"Build a v1 Causal LM.\n\n Args:\n vocab_size: The vocabulary size.\n num_layers: The number of transformer layers.\n hidden_dim: The model hidden dimension.\n num_heads: THe number of attention heads.\n\n Returns:\n Initialized model.\n \"\"\"\n model = CausalLM(\n decoder=Decoder(\n attention_mask=ALiBiAttentionLogitBiasLayer(num_heads),\n emb=TransformerEmbeddings(Embedding(vocab_size, embedding_dim=hidden_dim)),\n transformer=StackedTransformerLayer(\n num_layers,\n layer=TransformerLayer(\n self_attention=TransformerAttentionLayer(\n target_dim=hidden_dim,\n source_dim=hidden_dim,\n num_heads=num_heads,\n structure=\"prenorm\",\n norm=\"rmsnorm\",\n qkv_linear_cls=FusedQKVLinear,\n linear_biases=False,\n ),\n feed_forward=TransformerFeedForwardLayer(\n input_dim=hidden_dim,\n hidden_dim=round(hidden_dim * (21.0 / 8.0)),\n activation=(\n _torch_activation_fn(\"nn.silu\"),\n _torch_activation_fn(\"linear\"),\n ),\n structure=\"prenorm\",\n norm=\"rmsnorm\",\n linear_biases=False,\n ),\n ),\n ),\n output_norm=\"rmsnorm\",\n )\n )\n return model", "docstring": "\"\"\"Build a v1 Causal LM.\n\n Args:\n vocab_size: The vocabulary size.\n num_layers: The number of transformer layers.\n hidden_dim: The model hidden dimension.\n num_heads: THe number of attention heads.\n\n Returns:\n Initialized model.\n \"\"\"", "url": "https://github.com/apple/axlearn/blob/b9551871eae1b887a55b1e7d682cc6db7a51bf1e/axlearn/common/adapter_torch.py#L2061-L2108", "sha": "b9551871eae1b887a55b1e7d682cc6db7a51bf1e", "code/function": "@classmethod\n def v1_from_args(\n cls, vocab_size: int, *, num_layers: int, hidden_dim: int, num_heads: int\n ) -> CausalLM:\n \n model = CausalLM(\n decoder=Decoder(\n attention_mask=ALiBiAttentionLogitBiasLayer(num_heads),\n emb=TransformerEmbeddings(Embedding(vocab_size, embedding_dim=hidden_dim)),\n transformer=StackedTransformerLayer(\n num_layers,\n layer=TransformerLayer(\n self_attention=TransformerAttentionLayer(\n target_dim=hidden_dim,\n source_dim=hidden_dim,\n num_heads=num_heads,\n structure=\"prenorm\",\n norm=\"rmsnorm\",\n qkv_linear_cls=FusedQKVLinear,\n linear_biases=False,\n ),\n feed_forward=TransformerFeedForwardLayer(\n input_dim=hidden_dim,\n hidden_dim=round(hidden_dim * (21.0 / 8.0)),\n activation=(\n _torch_activation_fn(\"nn.silu\"),\n _torch_activation_fn(\"linear\"),\n ),\n structure=\"prenorm\",\n norm=\"rmsnorm\",\n linear_biases=False,\n ),\n ),\n ),\n output_norm=\"rmsnorm\",\n )\n )\n return model"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": 
"", "func_name": "__init__", "code": "def __init__(\n self,\n *,\n input_size: tuple[int, int],\n num_masking_patches: int,\n num_attempts: int = 10,\n min_mask_patches: int = 16,\n min_aspect: float = 0.3,\n max_mask_patches: Optional[int] = None,\n max_aspect: Optional[float] = None,\n ):\n \"\"\"Initializes MaskingGenerator.\n\n Args:\n input_size: an int tuple that represents (height, width) of the patchified target.\n num_masking_patches: the number of patches to be masked.\n num_attempts: the max number of attempts for one mask generation trial.\n min_mask_patches: the min number of patches for one masking area.\n max_mask_patches: the max number of patches for one masking area. If None, sets to\n num_masking_patches.\n min_aspect: the min aspect ratio (height/width) for one masking area.\n max_aspect: the max aspect ratio for one masking area. If None, sets to 1 / min_aspect.\n\n Raises:\n ValueError: if min_aspect or max_aspect are below 0 or max_aspect is smaller than\n min_aspect.\n \"\"\"\n self.height, self.width = input_size\n # Total number of patches in the pachified input.\n self.num_patches = self.height * self.width\n self.num_masking_patches = num_masking_patches\n self.num_attempts = num_attempts\n self.min_mask_patches = min_mask_patches\n self.max_mask_patches = (\n num_masking_patches if max_mask_patches is None else max_mask_patches\n )\n max_aspect = max_aspect or 1 / min_aspect\n if min_aspect <= 0 or max_aspect <= 0:\n raise ValueError(\"Both min and max aspect ratios need to be positive.\")\n if min_aspect > max_aspect:\n raise ValueError(\"min_aspect needs to be no greater than max_aspect.\")\n self.log_aspect_ratio = (math.log(min_aspect), math.log(max_aspect))", "docstring": "\"\"\"Initializes MaskingGenerator.\n\n Args:\n input_size: an int tuple that represents (height, width) of the patchified target.\n num_masking_patches: the number of patches to be masked.\n num_attempts: the max number of attempts for one mask generation trial.\n min_mask_patches: the min number of patches for one masking area.\n max_mask_patches: the max number of patches for one masking area. If None, sets to\n num_masking_patches.\n min_aspect: the min aspect ratio (height/width) for one masking area.\n max_aspect: the max aspect ratio for one masking area. 
If None, sets to 1 / min_aspect.\n\n        Raises:\n            ValueError: if min_aspect or max_aspect are below 0 or max_aspect is smaller than\n                min_aspect.\n        \"\"\"", "url": "https://github.com/apple/axlearn/blob/b9551871eae1b887a55b1e7d682cc6db7a51bf1e/axlearn/vision/mask_generator.py#L30-L71", "sha": "b9551871eae1b887a55b1e7d682cc6db7a51bf1e", "code/function": "def __init__(\n        self,\n        *,\n        input_size: tuple[int, int],\n        num_masking_patches: int,\n        num_attempts: int = 10,\n        min_mask_patches: int = 16,\n        min_aspect: float = 0.3,\n        max_mask_patches: Optional[int] = None,\n        max_aspect: Optional[float] = None,\n    ):\n        \n        self.height, self.width = input_size\n        # Total number of patches in the pachified input.\n        self.num_patches = self.height * self.width\n        self.num_masking_patches = num_masking_patches\n        self.num_attempts = num_attempts\n        self.min_mask_patches = min_mask_patches\n        self.max_mask_patches = (\n            num_masking_patches if max_mask_patches is None else max_mask_patches\n        )\n        max_aspect = max_aspect or 1 / min_aspect\n        if min_aspect <= 0 or max_aspect <= 0:\n            raise ValueError(\"Both min and max aspect ratios need to be positive.\")\n        if min_aspect > max_aspect:\n            raise ValueError(\"min_aspect needs to be no greater than max_aspect.\")\n        self.log_aspect_ratio = (math.log(min_aspect), math.log(max_aspect))"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MobileNets.endpoints_dims", "code": "@property\n    def endpoints_dims(self) -> dict[str, int]:\n        \"\"\"A dict of {endpoint: dim} specifies dimension of intermediate representations.\"\"\"\n        return self._endpoints_dims", "docstring": "\"\"\"A dict of {endpoint: dim} specifies dimension of intermediate representations.\"\"\"", "url": "https://github.com/apple/axlearn/blob/b9551871eae1b887a55b1e7d682cc6db7a51bf1e/axlearn/vision/mobilenets.py#L514-L517", "sha": "b9551871eae1b887a55b1e7d682cc6db7a51bf1e", "code/function": "@property\n    def endpoints_dims(self) -> dict[str, int]:\n        \n        return self._endpoints_dims"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "spatial_encoding", "code": "def spatial_encoding(td: TensorDict):\n    \"\"\"We use a spatial encoding as proposed in GraphFormer (https://arxiv.org/abs/2106.05234)\n    The spatial encoding in GraphFormer determines the distance of the shortest path between any two\n    nodes i and j and uses a special value for node pairs that cannot be connected at all.\n    For any two operations i<j we count the number of operations that, starting from i, have been\n    completed before arriving at j (e.g. i=3 j=5 -> e=2) and for i>j the negative number of\n    operations that, starting from j, have been completed before arriving at i (e.g. 
i=5 j=3 -> e=-2).\n    For i=j we set e=0 as well as for operations of different jobs.\n\n    :param torch.Tensor[bs, n_ops] ops_job_map: tensor specifying the index of its corresponding job\n    :return torch.Tensor[bs, n_ops, n_ops]: length of shortest path between any two operations\n    \"\"\"\n    bs, _, n_total_ops = td[\"job_ops_adj\"].shape\n    max_ops_per_job = int(td[\"job_ops_adj\"].sum(-1).max())\n    ops_job_map = td[\"ops_job_map\"]\n    pad_mask = td[\"pad_mask\"]\n\n    same_job = (ops_job_map[:, None] == ops_job_map[..., None]).to(torch.int32)\n    # mask padded\n    same_job[pad_mask.unsqueeze(2).expand_as(same_job)] = 0\n    same_job[pad_mask.unsqueeze(1).expand_as(same_job)] = 0\n    # take upper triangular of same_job and set diagonal to zero for counting purposes\n    upper_tri = torch.triu(same_job) - torch.diag(\n        torch.ones(n_total_ops, device=td.device)\n    )[None].expand_as(same_job)\n    # cumsum and masking of operations that do not belong to the same job\n    num_jumps = upper_tri.cumsum(2) * upper_tri\n    # mirror the matrix\n    num_jumps = num_jumps + num_jumps.transpose(1, 2)\n    # NOTE: shifted this logic into the spatial encoding module\n    # num_jumps = num_jumps + (-num_jumps.transpose(1,2))\n    assert not torch.any(num_jumps >= max_ops_per_job)\n    # special value for ops of different jobs and self-loops\n    num_jumps = torch.where(num_jumps == 0, -1, num_jumps)\n    self_mask = torch.eye(n_total_ops).repeat(bs, 1, 1).bool()\n    num_jumps[self_mask] = 0\n    return num_jumps", "docstring": "\"\"\"We use a spatial encoding as proposed in GraphFormer (https://arxiv.org/abs/2106.05234)\n    The spatial encoding in GraphFormer determines the distance of the shortest path between any two\n    nodes i and j and uses a special value for node pairs that cannot be connected at all.\n    For any two operations i<j we count the number of operations that, starting from i, have been\n    completed before arriving at j (e.g. i=3 j=5 -> e=2) and for i>j the negative number of\n    operations that, starting from j, have been completed before arriving at i (e.g. 
i=5 j=3 -> e=-2).\n For i=j we set e=0 as well as for operations of different jobs.\n\n :param torch.Tensor[bs, n_ops] ops_job_map: tensor specifying the index of its corresponding job\n :return torch.Tensor[bs, n_ops, n_ops]: length of shortest path between any two operations\n \"\"\"", "url": "https://github.com/ai4co/rl4co/blob/643ef99d118ce535e615a9441838782a2decf412/rl4co/envs/scheduling/fjsp/utils.py#L157-L193", "sha": "643ef99d118ce535e615a9441838782a2decf412", "code/function": "def spatial_encoding(td: TensorDict):\n \n bs, _, n_total_ops = td[\"job_ops_adj\"].shape\n max_ops_per_job = int(td[\"job_ops_adj\"].sum(-1).max())\n ops_job_map = td[\"ops_job_map\"]\n pad_mask = td[\"pad_mask\"]\n\n same_job = (ops_job_map[:, None] == ops_job_map[..., None]).to(torch.int32)\n # mask padded\n same_job[pad_mask.unsqueeze(2).expand_as(same_job)] = 0\n same_job[pad_mask.unsqueeze(1).expand_as(same_job)] = 0\n # take upper triangular of same_job and set diagonal to zero for counting purposes\n upper_tri = torch.triu(same_job) - torch.diag(\n torch.ones(n_total_ops, device=td.device)\n )[None].expand_as(same_job)\n # cumsum and masking of operations that do not belong to the same job\n num_jumps = upper_tri.cumsum(2) * upper_tri\n # mirror the matrix\n num_jumps = num_jumps + num_jumps.transpose(1, 2)\n # NOTE: shifted this logic into the spatial encoding module\n # num_jumps = num_jumps + (-num_jumps.transpose(1,2))\n assert not torch.any(num_jumps >= max_ops_per_job)\n # special value for ops of different jobs and self-loops\n num_jumps = torch.where(num_jumps == 0, -1, num_jumps)\n self_mask = torch.eye(n_total_ops).repeat(bs, 1, 1).bool()\n num_jumps[self_mask] = 0\n return num_jumps"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_create_examples", "code": "def _create_examples(self, lines, set_type):\n \"\"\"Creates examples for the training and dev sets.\"\"\"\n examples = []\n for (i, line) in enumerate(lines):\n guid = \"%s-%s\" % (set_type, i)\n text_a = line[3]\n label = line[1]\n examples.append(\n InputExample(guid=guid,\n text_a=text_a,\n text_b=None,\n label=label))\n return examples", "docstring": "\"\"\"Creates examples for the training and dev sets.\"\"\"", "url": "https://github.com/YyzHarry/SubpopBench/blob/4d3dbbe21029666ef19d040e110ec22908640c5b/utils_glue.py#L183-L195", "sha": "4d3dbbe21029666ef19d040e110ec22908640c5b", "code/function": "def _create_examples(self, lines, set_type):\n \n examples = []\n for (i, line) in enumerate(lines):\n guid = \"%s-%s\" % (set_type, i)\n text_a = line[3]\n label = line[1]\n examples.append(\n InputExample(guid=guid,\n text_a=text_a,\n text_b=None,\n label=label))\n return examples"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_get_database_proxy", "code": "async def _get_database_proxy(self, **kwargs) -> DatabaseProxy:\n \"\"\"Gets the database proxy.\"\"\"\n try:\n if await self._does_database_exist():\n return self.cosmos_client.get_database_client(self.database_name)\n\n if self.create_database:\n return await self.cosmos_client.create_database(self.database_name, **kwargs)\n raise VectorStoreOperationException(f\"Database '{self.database_name}' does not exist.\")\n except Exception as e:\n raise VectorStoreOperationException(f\"Failed to get database proxy for '{id}'.\") from e", "docstring": "\"\"\"Gets the database proxy.\"\"\"", "url": 
"https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/python/semantic_kernel/connectors/memory/azure_cosmos_db/azure_cosmos_db_no_sql_base.py#L101-L111", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83", "code/function": "async def _get_database_proxy(self, **kwargs) -> DatabaseProxy:\n \n try:\n if await self._does_database_exist():\n return self.cosmos_client.get_database_client(self.database_name)\n\n if self.create_database:\n return await self.cosmos_client.create_database(self.database_name, **kwargs)\n raise VectorStoreOperationException(f\"Database '{self.database_name}' does not exist.\")\n except Exception as e:\n raise VectorStoreOperationException(f\"Failed to get database proxy for '{id}'.\") from e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_get_underlying_type", "code": "def _get_underlying_type(annotation: Any) -> Any:\n \"\"\"Get the underlying type of the annotation.\"\"\"\n if isinstance(annotation, types.UnionType):\n return _get_non_none_type(annotation.__args__)\n\n if hasattr(annotation, \"__origin__\"):\n if annotation.__origin__ is Union:\n return _get_non_none_type(get_args(annotation))\n\n if isinstance(annotation.__origin__, types.UnionType):\n return _get_non_none_type(annotation.__origin__.__args__)\n\n return annotation.__origin__\n\n return annotation", "docstring": "\"\"\"Get the underlying type of the annotation.\"\"\"", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/python/semantic_kernel/functions/kernel_function_decorator.py#L88-L102", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83", "code/function": "def _get_underlying_type(annotation: Any) -> Any:\n \n if isinstance(annotation, types.UnionType):\n return _get_non_none_type(annotation.__args__)\n\n if hasattr(annotation, \"__origin__\"):\n if annotation.__origin__ is Union:\n return _get_non_none_type(get_args(annotation))\n\n if isinstance(annotation.__origin__, types.UnionType):\n return _get_non_none_type(annotation.__origin__.__args__)\n\n return annotation.__origin__\n\n return annotation"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_collections", "code": "async def get_collections(self) -> list[str]:\n \"\"\"Nullifies behavior of SemanticTextMemoryBase get_collections.\"\"\"\n return []", "docstring": "\"\"\"Nullifies behavior of SemanticTextMemoryBase get_collections.\"\"\"", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/python/semantic_kernel/memory/null_memory.py#L49-L51", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83", "code/function": "async def get_collections(self) -> list[str]:\n \n return []"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "azure_cognitive_search_memory_store", "code": "@pytest.fixture\ndef azure_cognitive_search_memory_store(azure_ai_search_unit_test_env):\n \"\"\"Fixture to instantiate AzureCognitiveSearchMemoryStore with basic configuration.\"\"\"\n return AzureCognitiveSearchMemoryStore(\n 1536, \"https://test.search.windows.net\", azure_credentials=AzureKeyCredential(\"test_key\")\n )", "docstring": "\"\"\"Fixture to instantiate AzureCognitiveSearchMemoryStore with basic configuration.\"\"\"", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/python/tests/unit/memory/test_azure_cognitive_search_memory_store_unit_tests.py#L13-L18", "sha": 
"cd84e877980187e62d86bb5bc6086d264e62ee83", "code/function": "@pytest.fixture\ndef azure_cognitive_search_memory_store(azure_ai_search_unit_test_env):\n \n return AzureCognitiveSearchMemoryStore(\n 1536, \"https://test.search.windows.net\", azure_credentials=AzureKeyCredential(\"test_key\")\n )"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "forward", "code": "def forward(self, *xs):\n \"\"\"Forward pass.\n\n Returns:\n tensor: output\n \"\"\"\n output = xs[0]\n\n if len(xs) == 2:\n res = self.resConfUnit1(xs[1])\n output = self.skip_add.add(output, res)\n # output += res\n\n output = self.resConfUnit2(output)\n\n output = nn.functional.interpolate(\n output, scale_factor=2, mode=\"bilinear\", align_corners=self.align_corners\n )\n\n output = self.out_conv(output)\n\n return output", "docstring": "\"\"\"Forward pass.\n\n Returns:\n tensor: output\n \"\"\"", "url": "https://github.com/yu-takagi/StableDiffusionReconstruction/blob/e187d4b3db1d647ee3e1b4256a2068ffd15df683/codes/diffusion_sd2/stablediffusion/ldm/modules/midas/midas/blocks.py#L320-L341", "sha": "e187d4b3db1d647ee3e1b4256a2068ffd15df683", "code/function": "def forward(self, *xs):\n \n output = xs[0]\n\n if len(xs) == 2:\n res = self.resConfUnit1(xs[1])\n output = self.skip_add.add(output, res)\n # output += res\n\n output = self.resConfUnit2(output)\n\n output = nn.functional.interpolate(\n output, scale_factor=2, mode=\"bilinear\", align_corners=self.align_corners\n )\n\n output = self.out_conv(output)\n\n return output"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "posedict_to_pose", "code": "def posedict_to_pose(posedict: Dict[str, float]) -> List[float]:\n \"\"\"Convert a posedict (from an emotion JSON) into a list of morph values (in the order the models expect them).\"\"\"\n # sanity check\n unrecognized_keys = set(posedict.keys()) - set(posedict_keys)\n if unrecognized_keys:\n logger.warning(f\"posedict_to_pose: ignoring unrecognized keys in posedict: {unrecognized_keys}\")\n # Missing keys are fine - keys for zero values can simply be omitted.\n\n pose = [0.0 for i in range(len(posedict_keys))]\n for idx, key in enumerate(posedict_keys):\n pose[idx] = posedict.get(key, 0.0)\n return pose", "docstring": "\"\"\"Convert a posedict (from an emotion JSON) into a list of morph values (in the order the models expect them).\"\"\"", "url": "https://github.com/SillyTavern/SillyTavern-Extras/blob/fdc1ec04b632b1d871cc0ad3a9aa132e985fc398/talkinghead/tha3/app/util.py#L118-L129", "sha": "fdc1ec04b632b1d871cc0ad3a9aa132e985fc398", "code/function": "def posedict_to_pose(posedict: Dict[str, float]) -> List[float]:\n \n # sanity check\n unrecognized_keys = set(posedict.keys()) - set(posedict_keys)\n if unrecognized_keys:\n logger.warning(f\"posedict_to_pose: ignoring unrecognized keys in posedict: {unrecognized_keys}\")\n # Missing keys are fine - keys for zero values can simply be omitted.\n\n pose = [0.0 for i in range(len(posedict_keys))]\n for idx, key in enumerate(posedict_keys):\n pose[idx] = posedict.get(key, 0.0)\n return pose"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "to_tensor", "code": "def to_tensor(self, dtype, device):\n \"\"\"See :func:`BaseInstanceMasks.to_tensor`.\"\"\"\n if len(self.masks) == 0:\n return torch.empty((0, self.height, self.width),\n dtype=dtype,\n device=device)\n ndarray_masks = self.to_ndarray()\n return torch.tensor(ndarray_masks, dtype=dtype, device=device)", "docstring": "\"\"\"See 
:func:`BaseInstanceMasks.to_tensor`.\"\"\"", "url": "https://github.com/rayleizhu/BiFormer/blob/1697bbbeafb8680524898f1dcaac10defd0604be/object_detection/mmdet/core/mask/structures.py#L881-L888", "sha": "1697bbbeafb8680524898f1dcaac10defd0604be", "code/function": "def to_tensor(self, dtype, device):\n \n if len(self.masks) == 0:\n return torch.empty((0, self.height, self.width),\n dtype=dtype,\n device=device)\n ndarray_masks = self.to_ndarray()\n return torch.tensor(ndarray_masks, dtype=dtype, device=device)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__call__", "code": "def __call__(self, results):\n \"\"\"Call function to load proposals from file.\n\n Args:\n results (dict): Result dict from :obj:`mmdet.CustomDataset`.\n\n Returns:\n dict: The dict contains loaded proposal annotations.\n \"\"\"\n\n proposals = results['proposals']\n if proposals.shape[1] not in (4, 5):\n raise AssertionError(\n 'proposals should have shapes (n, 4) or (n, 5), '\n f'but found {proposals.shape}')\n proposals = proposals[:, :4]\n\n if self.num_max_proposals is not None:\n proposals = proposals[:self.num_max_proposals]\n\n if len(proposals) == 0:\n proposals = np.array([[0, 0, 0, 0]], dtype=np.float32)\n results['proposals'] = proposals\n results['bbox_fields'].append('proposals')\n return results", "docstring": "\"\"\"Call function to load proposals from file.\n\n Args:\n results (dict): Result dict from :obj:`mmdet.CustomDataset`.\n\n Returns:\n dict: The dict contains loaded proposal annotations.\n \"\"\"", "url": "https://github.com/rayleizhu/BiFormer/blob/1697bbbeafb8680524898f1dcaac10defd0604be/object_detection/mmdet/datasets/pipelines/loading.py#L401-L425", "sha": "1697bbbeafb8680524898f1dcaac10defd0604be", "code/function": "def __call__(self, results):\n \n\n proposals = results['proposals']\n if proposals.shape[1] not in (4, 5):\n raise AssertionError(\n 'proposals should have shapes (n, 4) or (n, 5), '\n f'but found {proposals.shape}')\n proposals = proposals[:, :4]\n\n if self.num_max_proposals is not None:\n proposals = proposals[:self.num_max_proposals]\n\n if len(proposals) == 0:\n proposals = np.array([[0, 0, 0, 0]], dtype=np.float32)\n results['proposals'] = proposals\n results['bbox_fields'].append('proposals')\n return results"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "forward", "code": "def forward(self, x):\n \"\"\"Forward function.\"\"\"\n if self.num_branches == 1:\n return [self.branches[0](x[0])]\n\n for i in range(self.num_branches):\n x[i] = self.branches[i](x[i])\n\n x_fuse = []\n for i in range(len(self.fuse_layers)):\n y = 0\n for j in range(self.num_branches):\n if i == j:\n y += x[j]\n else:\n y += self.fuse_layers[i][j](x[j])\n x_fuse.append(self.relu(y))\n return x_fuse", "docstring": "\"\"\"Forward function.\"\"\"", "url": "https://github.com/rayleizhu/BiFormer/blob/1697bbbeafb8680524898f1dcaac10defd0604be/object_detection/mmdet/models/backbones/hrnet.py#L177-L194", "sha": "1697bbbeafb8680524898f1dcaac10defd0604be", "code/function": "def forward(self, x):\n \n if self.num_branches == 1:\n return [self.branches[0](x[0])]\n\n for i in range(self.num_branches):\n x[i] = self.branches[i](x[i])\n\n x_fuse = []\n for i in range(len(self.fuse_layers)):\n y = 0\n for j in range(self.num_branches):\n if i == j:\n y += x[j]\n else:\n y += self.fuse_layers[i][j](x[j])\n x_fuse.append(self.relu(y))\n return x_fuse"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"varifocal_loss", "code": "@mmcv.jit(derivate=True, coderize=True)\ndef varifocal_loss(pred,\n target,\n weight=None,\n alpha=0.75,\n gamma=2.0,\n iou_weighted=True,\n reduction='mean',\n avg_factor=None):\n \"\"\"`Varifocal Loss `_\n\n Args:\n pred (torch.Tensor): The prediction with shape (N, C), C is the\n number of classes\n target (torch.Tensor): The learning target of the iou-aware\n classification score with shape (N, C), C is the number of classes.\n weight (torch.Tensor, optional): The weight of loss for each\n prediction. Defaults to None.\n alpha (float, optional): A balance factor for the negative part of\n Varifocal Loss, which is different from the alpha of Focal Loss.\n Defaults to 0.75.\n gamma (float, optional): The gamma for calculating the modulating\n factor. Defaults to 2.0.\n iou_weighted (bool, optional): Whether to weight the loss of the\n positive example with the iou target. Defaults to True.\n reduction (str, optional): The method used to reduce the loss into\n a scalar. Defaults to 'mean'. Options are \"none\", \"mean\" and\n \"sum\".\n avg_factor (int, optional): Average factor that is used to average\n the loss. Defaults to None.\n \"\"\"\n # pred and target should be of the same size\n assert pred.size() == target.size()\n pred_sigmoid = pred.sigmoid()\n target = target.type_as(pred)\n if iou_weighted:\n focal_weight = target * (target > 0.0).float() + \\\n alpha * (pred_sigmoid - target).abs().pow(gamma) * \\\n (target <= 0.0).float()\n else:\n focal_weight = (target > 0.0).float() + \\\n alpha * (pred_sigmoid - target).abs().pow(gamma) * \\\n (target <= 0.0).float()\n loss = F.binary_cross_entropy_with_logits(\n pred, target, reduction='none') * focal_weight\n loss = weight_reduce_loss(loss, weight, reduction, avg_factor)\n return loss", "docstring": "\"\"\"`Varifocal Loss `_\n\n Args:\n pred (torch.Tensor): The prediction with shape (N, C), C is the\n number of classes\n target (torch.Tensor): The learning target of the iou-aware\n classification score with shape (N, C), C is the number of classes.\n weight (torch.Tensor, optional): The weight of loss for each\n prediction. Defaults to None.\n alpha (float, optional): A balance factor for the negative part of\n Varifocal Loss, which is different from the alpha of Focal Loss.\n Defaults to 0.75.\n gamma (float, optional): The gamma for calculating the modulating\n factor. Defaults to 2.0.\n iou_weighted (bool, optional): Whether to weight the loss of the\n positive example with the iou target. Defaults to True.\n reduction (str, optional): The method used to reduce the loss into\n a scalar. Defaults to 'mean'. Options are \"none\", \"mean\" and\n \"sum\".\n avg_factor (int, optional): Average factor that is used to average\n the loss. 
Defaults to None.\n \"\"\"", "url": "https://github.com/rayleizhu/BiFormer/blob/1697bbbeafb8680524898f1dcaac10defd0604be/object_detection/mmdet/models/losses/varifocal_loss.py#L9-L55", "sha": "1697bbbeafb8680524898f1dcaac10defd0604be", "code/function": "@mmcv.jit(derivate=True, coderize=True)\ndef varifocal_loss(pred,\n target,\n weight=None,\n alpha=0.75,\n gamma=2.0,\n iou_weighted=True,\n reduction='mean',\n avg_factor=None):\n \n # pred and target should be of the same size\n assert pred.size() == target.size()\n pred_sigmoid = pred.sigmoid()\n target = target.type_as(pred)\n if iou_weighted:\n focal_weight = target * (target > 0.0).float() + \\\n alpha * (pred_sigmoid - target).abs().pow(gamma) * \\\n (target <= 0.0).float()\n else:\n focal_weight = (target > 0.0).float() + \\\n alpha * (pred_sigmoid - target).abs().pow(gamma) * \\\n (target <= 0.0).float()\n loss = F.binary_cross_entropy_with_logits(\n pred, target, reduction='none') * focal_weight\n loss = weight_reduce_loss(loss, weight, reduction, avg_factor)\n return loss"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PeopleManager.get_people_manager", "code": "@staticmethod\n def get_people_manager():\n \"\"\"\n Method that returns the current people manager.\n \"\"\"\n return PeopleManager()", "docstring": "\"\"\"\n Method that returns the current people manager.\n \"\"\"", "url": "https://github.com/PegasusSimulator/PegasusSimulator/blob/31367ff57d2c44eb39d8fc7fced5044a24a930eb/extensions/pegasus.simulator/pegasus/simulator/logic/people_manager.py#L59-L64", "sha": "31367ff57d2c44eb39d8fc7fced5044a24a930eb", "code/function": "@staticmethod\n def get_people_manager():\n \n return PeopleManager()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_convert_old_conf", "code": "def _convert_old_conf(console: Console, path: Path) -> str:\n \"\"\"Reads old config file and converts it to the v2 toml.\"\"\"\n cp = ConfigParser()\n cp.read_string(path.read_text())\n if not cp.has_section(\"default\"):\n console.print(f\"The config file {path!s} does not contain any configuration, nothing to migrate.\")\n sys.exit(1)\n\n v1_config = dict(cp.items(\"default\"))\n v2_config = v1_to_v2_config_dict(v1_config, env=False, get_config=False)\n v2_config_toml = tomli_w.dumps(v2_config)\n console.print(\n Markdown(f\"Your v1 project config converted to the v2 format looks like this:\\n```toml\\n{v2_config_toml}\\n```\"),\n )\n return v2_config_toml", "docstring": "\"\"\"Reads old config file and converts it to the v2 toml.\"\"\"", "url": "https://github.com/emdgroup/foundry-dev-tools/blob/605e8c1d810dc67f45cefad7edc3f368f39b0d2c/libs/foundry-dev-tools/src/foundry_dev_tools/cli/config.py#L180-L194", "sha": "605e8c1d810dc67f45cefad7edc3f368f39b0d2c", "code/function": "def _convert_old_conf(console: Console, path: Path) -> str:\n \n cp = ConfigParser()\n cp.read_string(path.read_text())\n if not cp.has_section(\"default\"):\n console.print(f\"The config file {path!s} does not contain any configuration, nothing to migrate.\")\n sys.exit(1)\n\n v1_config = dict(cp.items(\"default\"))\n v2_config = v1_to_v2_config_dict(v1_config, env=False, get_config=False)\n v2_config_toml = tomli_w.dumps(v2_config)\n console.print(\n Markdown(f\"Your v1 project config converted to the v2 format looks like this:\\n```toml\\n{v2_config_toml}\\n```\"),\n )\n return v2_config_toml"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_resize_seg", "code": "def 
_resize_seg(self, results):\n \"\"\"Resize semantic segmentation map with ``results['scale']``.\"\"\"\n for key in results.get('seg_fields', []):\n if self.keep_ratio:\n gt_seg = mmcv.imrescale(\n results[key],\n results['scale'],\n interpolation='nearest',\n backend=self.backend)\n else:\n gt_seg = mmcv.imresize(\n results[key],\n results['scale'],\n interpolation='nearest',\n backend=self.backend)\n results[key] = gt_seg", "docstring": "\"\"\"Resize semantic segmentation map with ``results['scale']``.\"\"\"", "url": "https://github.com/Chasel-Tsui/mmrotate-dcfl/blob/d60ca27234a3276a4ca714b5ad616366a4bbdd9a/mmdet/datasets/pipelines/transforms.py#L268-L283", "sha": "d60ca27234a3276a4ca714b5ad616366a4bbdd9a", "code/function": "def _resize_seg(self, results):\n \n for key in results.get('seg_fields', []):\n if self.keep_ratio:\n gt_seg = mmcv.imrescale(\n results[key],\n results['scale'],\n interpolation='nearest',\n backend=self.backend)\n else:\n gt_seg = mmcv.imresize(\n results[key],\n results['scale'],\n interpolation='nearest',\n backend=self.backend)\n results[key] = gt_seg"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "simple_test", "code": "def simple_test(self, img, img_metas, rescale=False):\n \"\"\"Test function without test time augmentation.\n\n Args:\n imgs (list[torch.Tensor]): List of multiple images\n img_metas (list[dict]): List of image information.\n rescale (bool, optional): Whether to rescale the results.\n Defaults to False.\n\n Returns:\n list[np.ndarray]: proposals\n \"\"\"\n x = self.extract_feat(img)\n # get origin input shape to onnx dynamic input shape\n if torch.onnx.is_in_onnx_export():\n img_shape = torch._shape_as_tensor(img)[2:]\n img_metas[0]['img_shape_for_onnx'] = img_shape\n proposal_list = self.rpn_head.simple_test_rpn(x, img_metas)\n if rescale:\n for proposals, meta in zip(proposal_list, img_metas):\n proposals[:, :4] /= proposals.new_tensor(meta['scale_factor'])\n if torch.onnx.is_in_onnx_export():\n return proposal_list\n\n return [proposal.cpu().numpy() for proposal in proposal_list]", "docstring": "\"\"\"Test function without test time augmentation.\n\n Args:\n imgs (list[torch.Tensor]): List of multiple images\n img_metas (list[dict]): List of image information.\n rescale (bool, optional): Whether to rescale the results.\n Defaults to False.\n\n Returns:\n list[np.ndarray]: proposals\n \"\"\"", "url": "https://github.com/Chasel-Tsui/mmrotate-dcfl/blob/d60ca27234a3276a4ca714b5ad616366a4bbdd9a/mmdet/models/detectors/rpn.py#L91-L115", "sha": "d60ca27234a3276a4ca714b5ad616366a4bbdd9a", "code/function": "def simple_test(self, img, img_metas, rescale=False):\n \n x = self.extract_feat(img)\n # get origin input shape to onnx dynamic input shape\n if torch.onnx.is_in_onnx_export():\n img_shape = torch._shape_as_tensor(img)[2:]\n img_metas[0]['img_shape_for_onnx'] = img_shape\n proposal_list = self.rpn_head.simple_test_rpn(x, img_metas)\n if rescale:\n for proposals, meta in zip(proposal_list, img_metas):\n proposals[:, :4] /= proposals.new_tensor(meta['scale_factor'])\n if torch.onnx.is_in_onnx_export():\n return proposal_list\n\n return [proposal.cpu().numpy() for proposal in proposal_list]"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WebAction.__get_site", "code": "@staticmethod\n def __get_site(data):\n \"\"\"\n 查询单个站点信息\n \"\"\"\n tid = data.get(\"id\")\n site_free = False\n site_2xfree = False\n site_hr = False\n if tid:\n ret = Sites().get_sites(siteid=tid)\n if 
ret.get(\"signurl\"):\n site_attr = SiteConf().get_grap_conf(ret.get(\"signurl\"))\n if site_attr.get(\"FREE\"):\n site_free = True\n if site_attr.get(\"2XFREE\"):\n site_2xfree = True\n if site_attr.get(\"HR\"):\n site_hr = True\n else:\n ret = []\n return {\"code\": 0, \"site\": ret, \"site_free\": site_free, \"site_2xfree\": site_2xfree, \"site_hr\": site_hr}", "docstring": "\"\"\"\n 查询单个站点信息\n \"\"\"", "url": "https://github.com/linyuan0213/nas-tools/blob/0badded472a89b9171abba049ea05bd6f3611364/web/action.py#L1153-L1174", "sha": "0badded472a89b9171abba049ea05bd6f3611364", "code/function": "@staticmethod\n def __get_site(data):\n \n tid = data.get(\"id\")\n site_free = False\n site_2xfree = False\n site_hr = False\n if tid:\n ret = Sites().get_sites(siteid=tid)\n if ret.get(\"signurl\"):\n site_attr = SiteConf().get_grap_conf(ret.get(\"signurl\"))\n if site_attr.get(\"FREE\"):\n site_free = True\n if site_attr.get(\"2XFREE\"):\n site_2xfree = True\n if site_attr.get(\"HR\"):\n site_hr = True\n else:\n ret = []\n return {\"code\": 0, \"site\": ret, \"site_free\": site_free, \"site_2xfree\": site_2xfree, \"site_hr\": site_hr}"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "del_dataset", "code": "def del_dataset(self, dataset_id, dataset_name):\n \"\"\"\n 考虑做伪删除,但是感觉没必要\n\n \"\"\"\n msg_box = QMessageBox() # 后悔药(不\n msg_box.setWindowTitle(\"提示\")\n msg_box.setText(f\"确认删除数据集 {dataset_name} 吗?\\n{dataset_name} 将会永久失去!(真的很久!)\")\n msg_box.setIcon(QMessageBox.Question)\n\n # 添加按钮\n yes_button = msg_box.addButton(\"确定\", QMessageBox.AcceptRole)\n no_button = msg_box.addButton(\"取消\", QMessageBox.RejectRole)\n\n # 显示消息框,等待用户响应\n msg_box.exec()\n\n # 获取用户的响应\n button_clicked = msg_box.clickedButton()\n if button_clicked == yes_button:\n try:\n # dataset = Dataset.delete().where(Dataset.dataset_id == dataset_id)\n # self.add_dataset_data()\n dataset = Dataset.get(Dataset.dataset_id == dataset_id)\n name = dataset.dataset_name\n dataset.delete_instance()\n del_file_by_dataset_id(dataset_id)\n\n except Exception as e:\n guilogger.error(f\"删除数据集 id={dataset_id} 失败\")\n guilogger.error(e)\n else:\n guilogger.info(f\"数据集 {name} 成功删除\")\n finally:\n self.add_dataset_data()\n\n else:\n pass", "docstring": "\"\"\"\n 考虑做伪删除,但是感觉没必要\n\n \"\"\"", "url": "https://github.com/kslz/sound_dataset_tools2/blob/5b4ce54c2c597b16e246f709322156ca69aa1a20/ui/mygui.py#L397-L434", "sha": "5b4ce54c2c597b16e246f709322156ca69aa1a20", "code/function": "def del_dataset(self, dataset_id, dataset_name):\n \n msg_box = QMessageBox() # 后悔药(不\n msg_box.setWindowTitle(\"提示\")\n msg_box.setText(f\"确认删除数据集 {dataset_name} 吗?\\n{dataset_name} 将会永久失去!(真的很久!)\")\n msg_box.setIcon(QMessageBox.Question)\n\n # 添加按钮\n yes_button = msg_box.addButton(\"确定\", QMessageBox.AcceptRole)\n no_button = msg_box.addButton(\"取消\", QMessageBox.RejectRole)\n\n # 显示消息框,等待用户响应\n msg_box.exec()\n\n # 获取用户的响应\n button_clicked = msg_box.clickedButton()\n if button_clicked == yes_button:\n try:\n # dataset = Dataset.delete().where(Dataset.dataset_id == dataset_id)\n # self.add_dataset_data()\n dataset = Dataset.get(Dataset.dataset_id == dataset_id)\n name = dataset.dataset_name\n dataset.delete_instance()\n del_file_by_dataset_id(dataset_id)\n\n except Exception as e:\n guilogger.error(f\"删除数据集 id={dataset_id} 失败\")\n guilogger.error(e)\n else:\n guilogger.info(f\"数据集 {name} 成功删除\")\n finally:\n self.add_dataset_data()\n\n else:\n pass"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"quat2euler", "code": "def quat2euler(q, order='xyz', degrees=True):\n \"\"\"\n Convert (w, x, y, z) quaternions to xyz euler angles. This is used for bvh output.\n \"\"\"\n q0 = q[..., 0]\n q1 = q[..., 1]\n q2 = q[..., 2]\n q3 = q[..., 3]\n es = torch.empty(q0.shape + (3,), device=q.device, dtype=q.dtype)\n\n if order == 'xyz':\n es[..., 2] = torch.atan2(2 * (q0 * q3 - q1 * q2), q0 * q0 + q1 * q1 - q2 * q2 - q3 * q3)\n es[..., 1] = torch.asin((2 * (q1 * q3 + q0 * q2)).clip(-1, 1))\n es[..., 0] = torch.atan2(2 * (q0 * q1 - q2 * q3), q0 * q0 - q1 * q1 - q2 * q2 + q3 * q3)\n else:\n raise NotImplementedError('Cannot convert to ordering %s' % order)\n\n if degrees:\n es = es * 180 / np.pi\n\n return es", "docstring": "\"\"\"\n Convert (w, x, y, z) quaternions to xyz euler angles. This is used for bvh output.\n \"\"\"", "url": "https://github.com/Kebii/R2ET/blob/41c7e40fcb8a40eb3fb0deccf3d6b88b8230d572/outside-code/transforms.py#L103-L123", "sha": "41c7e40fcb8a40eb3fb0deccf3d6b88b8230d572", "code/function": "def quat2euler(q, order='xyz', degrees=True):\n \n q0 = q[..., 0]\n q1 = q[..., 1]\n q2 = q[..., 2]\n q3 = q[..., 3]\n es = torch.empty(q0.shape + (3,), device=q.device, dtype=q.dtype)\n\n if order == 'xyz':\n es[..., 2] = torch.atan2(2 * (q0 * q3 - q1 * q2), q0 * q0 + q1 * q1 - q2 * q2 - q3 * q3)\n es[..., 1] = torch.asin((2 * (q1 * q3 + q0 * q2)).clip(-1, 1))\n es[..., 0] = torch.atan2(2 * (q0 * q1 - q2 * q3), q0 * q0 - q1 * q1 - q2 * q2 + q3 * q3)\n else:\n raise NotImplementedError('Cannot convert to ordering %s' % order)\n\n if degrees:\n es = es * 180 / np.pi\n\n return es"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "test_get_printer", "code": "def test_get_printer(self):\n \"\"\"Tests the get_printer method.\"\"\"\n self.assertIsInstance(PrinterFactory.get_printer(\"plain\"), PlainPrinter)\n self.assertIsInstance(PrinterFactory.get_printer(\"markdown\"), MarkdownPrinter)", "docstring": "\"\"\"Tests the get_printer method.\"\"\"", "url": "https://github.com/adamyodinsky/TerminalGPT/blob/29aab7d9db5287b70c06abe161937bedc86e7933/tests/unit/test_printer.py#L145-L148", "sha": "29aab7d9db5287b70c06abe161937bedc86e7933", "code/function": "def test_get_printer(self):\n \n self.assertIsInstance(PrinterFactory.get_printer(\"plain\"), PlainPrinter)\n self.assertIsInstance(PrinterFactory.get_printer(\"markdown\"), MarkdownPrinter)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_utils_ToFloat", "code": "def _utils_ToFloat(s):\n \"\"\"Returns float(s) if s is a float. Otherwise None.\n\n Disallows infinities and nans.\n\n Args:\n s: A string to convert to a float.\n\n Returns:\n An float or None.\n \"\"\"\n try:\n x = float(s)\n if x not in [float('inf'), float('-inf')] and x == x: # not NaN\n return x\n else:\n return None\n except ValueError:\n return None", "docstring": "\"\"\"Returns float(s) if s is a float. 
Otherwise None.\n\n Disallows infinities and nans.\n\n Args:\n s: A string to convert to a float.\n\n Returns:\n An float or None.\n \"\"\"", "url": "https://github.com/google/coding-competitions-archive/blob/87385db7dbd81b281225412b8ad496334536d016/codejam/2019/round_3/napkin_folding/output_validators/validator/napkin_folding.py#L141-L159", "sha": "87385db7dbd81b281225412b8ad496334536d016", "code/function": "def _utils_ToFloat(s):\n \n try:\n x = float(s)\n if x not in [float('inf'), float('-inf')] and x == x: # not NaN\n return x\n else:\n return None\n except ValueError:\n return None"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "check_file_path", "code": "def check_file_path(file_path: Path):\n \"\"\"\n Ensure file extension is in ALLOWED_FILE_TYPES or is a URL.\n If file ends with _translated.txt or _bilingual.txt, skip it.\n If there is any txt file ending with _translated.txt or _bilingual.txt, skip it.\n \"\"\"\n if not file_path.suffix.lower() in ALLOWED_FILE_TYPES and not str(\n file_path).startswith('http'):\n print(f\"File extension {file_path.suffix} is not allowed.\")\n raise Exception(\"Please use a txt file or URL\")\n\n if file_path.stem.endswith(\"_translated\") or file_path.stem.endswith(\n \"extracted_translated\"):\n print(\n f\"You already have a translated file for {file_path}, skipping...\")\n return False\n elif file_path.stem.endswith(\"_bilingual\") or file_path.stem.endswith(\n \"extracted_bilingual\"):\n print(\n f\"You already have a bilingual file for {file_path}, skipping...\")\n return False\n\n if (file_path.with_name(f\"{file_path.stem}_translated.txt\").exists() or\n file_path.with_name(f\"{file_path.stem}_extracted_translated.txt\").exists()):\n print(\n f\"You already have a translated file for {file_path}, skipping...\")\n return False\n\n return True", "docstring": "\"\"\"\n Ensure file extension is in ALLOWED_FILE_TYPES or is a URL.\n If file ends with _translated.txt or _bilingual.txt, skip it.\n If there is any txt file ending with _translated.txt or _bilingual.txt, skip it.\n \"\"\"", "url": "https://github.com/Raychanan/ChatGPT-for-Translation/blob/0c6fe5d1fe66c1faed967e1b3403de5627bc85cc/ChatGPT-translate.py#L234-L262", "sha": "0c6fe5d1fe66c1faed967e1b3403de5627bc85cc", "code/function": "def check_file_path(file_path: Path):\n \n if not file_path.suffix.lower() in ALLOWED_FILE_TYPES and not str(\n file_path).startswith('http'):\n print(f\"File extension {file_path.suffix} is not allowed.\")\n raise Exception(\"Please use a txt file or URL\")\n\n if file_path.stem.endswith(\"_translated\") or file_path.stem.endswith(\n \"extracted_translated\"):\n print(\n f\"You already have a translated file for {file_path}, skipping...\")\n return False\n elif file_path.stem.endswith(\"_bilingual\") or file_path.stem.endswith(\n \"extracted_bilingual\"):\n print(\n f\"You already have a bilingual file for {file_path}, skipping...\")\n return False\n\n if (file_path.with_name(f\"{file_path.stem}_translated.txt\").exists() or\n file_path.with_name(f\"{file_path.stem}_extracted_translated.txt\").exists()):\n print(\n f\"You already have a translated file for {file_path}, skipping...\")\n return False\n\n return True"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_proposal_pos_embed", "code": "def get_proposal_pos_embed(self,\n proposals,\n num_pos_feats=128,\n temperature=10000):\n \"\"\"Get the position embedding of proposal.\"\"\"\n scale = 2 * math.pi\n dim_t = torch.arange(\n 
num_pos_feats, dtype=torch.float32, device=proposals.device)\n dim_t = temperature**(2 * (dim_t // 2) / num_pos_feats)\n # N, L, 4\n proposals = proposals.sigmoid() * scale\n # N, L, 4, 128\n pos = proposals[:, :, :, None] / dim_t\n # N, L, 4, 64, 2\n pos = torch.stack((pos[:, :, :, 0::2].sin(), pos[:, :, :, 1::2].cos()),\n dim=4).flatten(2)\n return pos", "docstring": "\"\"\"Get the position embedding of proposal.\"\"\"", "url": "https://github.com/shaunyuan22/CFINet/blob/45af342276e883aaacd49e280dba641331786603/mmdet/models/utils/transformer.py#L875-L891", "sha": "45af342276e883aaacd49e280dba641331786603", "code/function": "def get_proposal_pos_embed(self,\n proposals,\n num_pos_feats=128,\n temperature=10000):\n \n scale = 2 * math.pi\n dim_t = torch.arange(\n num_pos_feats, dtype=torch.float32, device=proposals.device)\n dim_t = temperature**(2 * (dim_t // 2) / num_pos_feats)\n # N, L, 4\n proposals = proposals.sigmoid() * scale\n # N, L, 4, 128\n pos = proposals[:, :, :, None] / dim_t\n # N, L, 4, 64, 2\n pos = torch.stack((pos[:, :, :, 0::2].sin(), pos[:, :, :, 1::2].cos()),\n dim=4).flatten(2)\n return pos"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_reference_points", "code": "def get_reference_points(H, W, Z=8, num_points_in_pillar=4, dim='3d', bs=1, device='cpu', dtype=torch.float):\n \"\"\"Get the reference points used in image cross-attention and single plane self-attention.\n Args:\n H, W: spatial shape of tpv.\n Z: hight of pillar.\n D: sample D points uniformly from each pillar.\n device (obj:`device`): The device where\n reference_points should be.\n Returns:\n Tensor: reference points used in decoder, has \\\n shape (bs, num_keys, num_levels, 2).\n \"\"\"\n\n # reference points in 3D space, used in image cross-attention\n if dim == '3d':\n zs = torch.linspace(0.5, Z - 0.5, num_points_in_pillar, dtype=dtype,\n device=device).view(-1, 1, 1).expand(num_points_in_pillar, H, W) / Z\n xs = torch.linspace(0.5, W - 0.5, W, dtype=dtype,\n device=device).view(1, 1, -1).expand(num_points_in_pillar, H, W) / W\n ys = torch.linspace(0.5, H - 0.5, H, dtype=dtype,\n device=device).view(1, -1, 1).expand(num_points_in_pillar, H, W) / H\n ref_3d = torch.stack((xs, ys, zs), -1)\n ref_3d = ref_3d.permute(0, 3, 1, 2).flatten(2).permute(0, 2, 1)\n ref_3d = ref_3d[None].repeat(bs, 1, 1, 1)\n return ref_3d\n\n # reference points on 2D tpv plane, used in self attention in tpvformer04 \n # which is an older version. 
Now we use get_cross_view_ref_points instead.\n elif dim == '2d':\n ref_y, ref_x = torch.meshgrid(\n torch.linspace(\n 0.5, H - 0.5, H, dtype=dtype, device=device),\n torch.linspace(\n 0.5, W - 0.5, W, dtype=dtype, device=device))\n ref_y = ref_y.reshape(-1)[None] / H\n ref_x = ref_x.reshape(-1)[None] / W\n ref_2d = torch.stack((ref_x, ref_y), -1)\n ref_2d = ref_2d.repeat(bs, 1, 1).unsqueeze(2)\n return ref_2d", "docstring": "\"\"\"Get the reference points used in image cross-attention and single plane self-attention.\n Args:\n H, W: spatial shape of tpv.\n Z: hight of pillar.\n D: sample D points uniformly from each pillar.\n device (obj:`device`): The device where\n reference_points should be.\n Returns:\n Tensor: reference points used in decoder, has \\\n shape (bs, num_keys, num_levels, 2).\n \"\"\"", "url": "https://github.com/wzzheng/OpenOcc/blob/dc80f79276e7048e0a9dc312531ad04c850963fb/model/encoder/tpvformer/utils.py#L76-L114", "sha": "dc80f79276e7048e0a9dc312531ad04c850963fb", "code/function": "def get_reference_points(H, W, Z=8, num_points_in_pillar=4, dim='3d', bs=1, device='cpu', dtype=torch.float):\n \n\n # reference points in 3D space, used in image cross-attention\n if dim == '3d':\n zs = torch.linspace(0.5, Z - 0.5, num_points_in_pillar, dtype=dtype,\n device=device).view(-1, 1, 1).expand(num_points_in_pillar, H, W) / Z\n xs = torch.linspace(0.5, W - 0.5, W, dtype=dtype,\n device=device).view(1, 1, -1).expand(num_points_in_pillar, H, W) / W\n ys = torch.linspace(0.5, H - 0.5, H, dtype=dtype,\n device=device).view(1, -1, 1).expand(num_points_in_pillar, H, W) / H\n ref_3d = torch.stack((xs, ys, zs), -1)\n ref_3d = ref_3d.permute(0, 3, 1, 2).flatten(2).permute(0, 2, 1)\n ref_3d = ref_3d[None].repeat(bs, 1, 1, 1)\n return ref_3d\n\n # reference points on 2D tpv plane, used in self attention in tpvformer04 \n # which is an older version. 
Now we use get_cross_view_ref_points instead.\n elif dim == '2d':\n ref_y, ref_x = torch.meshgrid(\n torch.linspace(\n 0.5, H - 0.5, H, dtype=dtype, device=device),\n torch.linspace(\n 0.5, W - 0.5, W, dtype=dtype, device=device))\n ref_y = ref_y.reshape(-1)[None] / H\n ref_x = ref_x.reshape(-1)[None] / W\n ref_2d = torch.stack((ref_x, ref_y), -1)\n ref_2d = ref_2d.repeat(bs, 1, 1).unsqueeze(2)\n return ref_2d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_default", "code": "def get_default(key):\n \"\"\"Get default value for key\"\"\"\n return DEFAULTS.get(key)", "docstring": "\"\"\"Get default value for key\"\"\"", "url": "https://github.com/bernhard-42/vscode-ocp-cad-viewer/blob/d32b3b482ee90b6762fa2c9819b6c9edbcf2d66b/ocp_vscode/config.py#L240-L242", "sha": "d32b3b482ee90b6762fa2c9819b6c9edbcf2d66b", "code/function": "def get_default(key):\n \n return DEFAULTS.get(key)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BatchNormalization.version_9", "code": "@classmethod\n def version_9(\n cls, node: onnx_node.OnnxNode, inputs: Sequence[Any]\n ) -> Callable[..., Any]:\n \"\"\"ONNX version_9 BatchNormalization op.\"\"\"\n cls._prepare(node, inputs, onnx_batchnormalization)\n return onnx_batchnormalization", "docstring": "\"\"\"ONNX version_9 BatchNormalization op.\"\"\"", "url": "https://github.com/google/jaxonnxruntime/blob/e20b8defdfd4263c89a5682e6d993499ad5bcb74/jaxonnxruntime/onnx_ops/batchnormalization.py#L56-L62", "sha": "e20b8defdfd4263c89a5682e6d993499ad5bcb74", "code/function": "@classmethod\n def version_9(\n cls, node: onnx_node.OnnxNode, inputs: Sequence[Any]\n ) -> Callable[..., Any]:\n \n cls._prepare(node, inputs, onnx_batchnormalization)\n return onnx_batchnormalization"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Mul.version_13", "code": "@classmethod\n def version_13(\n cls, node: onnx_node.OnnxNode, inputs: Sequence[Any]\n ) -> Callable[..., Any]:\n \"\"\"ONNX version_13 Mul op.\"\"\"\n cls._prepare(node, inputs, onnx_mul)\n return onnx_mul", "docstring": "\"\"\"ONNX version_13 Mul op.\"\"\"", "url": "https://github.com/google/jaxonnxruntime/blob/e20b8defdfd4263c89a5682e6d993499ad5bcb74/jaxonnxruntime/onnx_ops/mul.py#L53-L59", "sha": "e20b8defdfd4263c89a5682e6d993499ad5bcb74", "code/function": "@classmethod\n def version_13(\n cls, node: onnx_node.OnnxNode, inputs: Sequence[Any]\n ) -> Callable[..., Any]:\n \n cls._prepare(node, inputs, onnx_mul)\n return onnx_mul"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "generate_markdown_documentation", "code": "def generate_markdown_documentation(\n pydantic_models: list[type[BaseModel]], model_prefix=\"Model\", fields_prefix=\"Fields\",\n documentation_with_field_description=True\n) -> str:\n \"\"\"\n Generate markdown documentation for a list of Pydantic models.\n\n Args:\n pydantic_models (list[type[BaseModel]]): list of Pydantic model classes.\n model_prefix (str): Prefix for the model section.\n fields_prefix (str): Prefix for the fields section.\n documentation_with_field_description (bool): Include field descriptions in the documentation.\n\n Returns:\n str: Generated text documentation.\n \"\"\"\n documentation = \"\"\n pyd_models: list[tuple[type[BaseModel], bool]] = [(model, True) for model in pydantic_models]\n for model, add_prefix in pyd_models:\n if add_prefix:\n documentation += f\"{model_prefix}: {model.__name__}\\n\"\n else:\n documentation += f\"Model: 
{model.__name__}\\n\"\n\n # Handling multi-line model description with proper indentation\n\n class_doc = getdoc(model)\n base_class_doc = getdoc(BaseModel)\n class_description = class_doc if class_doc and class_doc != base_class_doc else \"\"\n if class_description != \"\":\n documentation += \" Description: \"\n documentation += format_multiline_description(class_description, 0) + \"\\n\"\n\n if add_prefix:\n # Indenting the fields section\n documentation += f\" {fields_prefix}:\\n\"\n else:\n documentation += f\" Fields:\\n\" # noqa: F541\n if isclass(model) and issubclass(model, BaseModel):\n for name, field_type in get_type_hints(model).items():\n # if name == \"markdown_code_block\":\n # continue\n if get_origin(field_type) == list:\n element_type = get_args(field_type)[0]\n if isclass(element_type) and issubclass(element_type, BaseModel):\n pyd_models.append((element_type, False))\n if get_origin(field_type) == Union:\n element_types = get_args(field_type)\n for element_type in element_types:\n if isclass(element_type) and issubclass(element_type, BaseModel):\n pyd_models.append((element_type, False))\n documentation += generate_field_markdown(\n name, field_type, model, documentation_with_field_description=documentation_with_field_description\n )\n documentation += \"\\n\"\n\n if hasattr(model, \"Config\") and hasattr(model.Config,\n \"json_schema_extra\") and \"example\" in model.Config.json_schema_extra:\n documentation += f\" Expected Example Output for {format_model_and_field_name(model.__name__)}:\\n\"\n json_example = json.dumps(model.Config.json_schema_extra[\"example\"])\n documentation += format_multiline_description(json_example, 2) + \"\\n\"\n\n return documentation", "docstring": "\"\"\"\n Generate markdown documentation for a list of Pydantic models.\n\n Args:\n pydantic_models (list[type[BaseModel]]): list of Pydantic model classes.\n model_prefix (str): Prefix for the model section.\n fields_prefix (str): Prefix for the fields section.\n documentation_with_field_description (bool): Include field descriptions in the documentation.\n\n Returns:\n str: Generated text documentation.\n \"\"\"", "url": "https://github.com/ggerganov/llama.cpp/blob/4078c77f9891831f29ffc7c315c8ec6695ba5ce7/examples/pydantic_models_to_grammar.py#L676-L738", "sha": "4078c77f9891831f29ffc7c315c8ec6695ba5ce7", "code/function": "def generate_markdown_documentation(\n pydantic_models: list[type[BaseModel]], model_prefix=\"Model\", fields_prefix=\"Fields\",\n documentation_with_field_description=True\n) -> str:\n \n documentation = \"\"\n pyd_models: list[tuple[type[BaseModel], bool]] = [(model, True) for model in pydantic_models]\n for model, add_prefix in pyd_models:\n if add_prefix:\n documentation += f\"{model_prefix}: {model.__name__}\\n\"\n else:\n documentation += f\"Model: {model.__name__}\\n\"\n\n # Handling multi-line model description with proper indentation\n\n class_doc = getdoc(model)\n base_class_doc = getdoc(BaseModel)\n class_description = class_doc if class_doc and class_doc != base_class_doc else \"\"\n if class_description != \"\":\n documentation += \" Description: \"\n documentation += format_multiline_description(class_description, 0) + \"\\n\"\n\n if add_prefix:\n # Indenting the fields section\n documentation += f\" {fields_prefix}:\\n\"\n else:\n documentation += f\" Fields:\\n\" # noqa: F541\n if isclass(model) and issubclass(model, BaseModel):\n for name, field_type in get_type_hints(model).items():\n # if name == \"markdown_code_block\":\n # continue\n if 
get_origin(field_type) == list:\n element_type = get_args(field_type)[0]\n if isclass(element_type) and issubclass(element_type, BaseModel):\n pyd_models.append((element_type, False))\n if get_origin(field_type) == Union:\n element_types = get_args(field_type)\n for element_type in element_types:\n if isclass(element_type) and issubclass(element_type, BaseModel):\n pyd_models.append((element_type, False))\n documentation += generate_field_markdown(\n name, field_type, model, documentation_with_field_description=documentation_with_field_description\n )\n documentation += \"\\n\"\n\n if hasattr(model, \"Config\") and hasattr(model.Config,\n \"json_schema_extra\") and \"example\" in model.Config.json_schema_extra:\n documentation += f\" Expected Example Output for {format_model_and_field_name(model.__name__)}:\\n\"\n json_example = json.dumps(model.Config.json_schema_extra[\"example\"])\n documentation += format_multiline_description(json_example, 2) + \"\\n\"\n\n return documentation"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_ButtonDialBase.templatable", "code": "@classmethod\n def templatable(cls: type[Button]) -> set[str]:\n \"\"\"Return if an attribute is templatable, which is if the type-annotation is str.\"\"\"\n schema = cls.schema()\n properties = schema[\"properties\"]\n return {k for k, v in properties.items() if v[\"allow_template\"]}", "docstring": "\"\"\"Return if an attribute is templatable, which is if the type-annotation is str.\"\"\"", "url": "https://github.com/basnijholt/home-assistant-streamdeck-yaml/blob/e04dc7229b8a6148a511ed455e2df5988bbcf6c4/home_assistant_streamdeck_yaml.py#L187-L192", "sha": "e04dc7229b8a6148a511ed455e2df5988bbcf6c4", "code/function": "@classmethod\n def templatable(cls: type[Button]) -> set[str]:\n \n schema = cls.schema()\n properties = schema[\"properties\"]\n return {k for k, v in properties.items() if v[\"allow_template\"]}"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GroqModel.stream", "code": "@property\n def stream(self) -> bool:\n r\"\"\"Returns whether the model supports streaming. But Groq API does\n not support streaming.\n \"\"\"\n return False", "docstring": "r\"\"\"Returns whether the model supports streaming. 
But Groq API does\n not support streaming.\n \"\"\"", "url": "https://github.com/camel-ai/camel/blob/4536d76610140ac02f92cb38a3dfc56d95f231ac/camel/models/groq_model.py#L134-L139", "sha": "4536d76610140ac02f92cb38a3dfc56d95f231ac", "code/function": "@property\n def stream(self) -> bool:\n \n return False"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "clean_cache", "code": "def clean_cache():\n \"\"\"\n 清空搜索缓存\n \"\"\"\n search_image_by_text_path_time.cache_clear()\n search_image_by_image.cache_clear()\n search_video_by_image.cache_clear()\n search_video_by_text_path_time.cache_clear()\n search_pexels_video_by_text.cache_clear()", "docstring": "\"\"\"\n 清空搜索缓存\n \"\"\"", "url": "https://github.com/chn-lee-yumi/MaterialSearch/blob/c7a5e94d67c8dd67fc6c7d3f1eb6cf8c89d5467c/search.py#L21-L29", "sha": "c7a5e94d67c8dd67fc6c7d3f1eb6cf8c89d5467c", "code/function": "def clean_cache():\n \n search_image_by_text_path_time.cache_clear()\n search_image_by_image.cache_clear()\n search_video_by_image.cache_clear()\n search_video_by_text_path_time.cache_clear()\n search_pexels_video_by_text.cache_clear()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__validate_time_delta", "code": "def __validate_time_delta(value: str) -> str:\n \"\"\"\n Check to see if passed string is in the list of possible Time Deltas.\n :param value: Time Delta name.\n :return: Passed value or No Return\n \"\"\"\n valid_values = TIME_DELTA_VALUES\n if value in valid_values:\n return value\n else:\n logging.error(\n f\"Invalid time_delta value: {value}. Valid options: {valid_values}\"\n )", "docstring": "\"\"\"\n Check to see if passed string is in the list of possible Time Deltas.\n :param value: Time Delta name.\n :return: Passed value or No Return\n \"\"\"", "url": "https://github.com/akshata29/entaoai/blob/aa6cfbfbf6f19128bcc9135bc75effe38857bd31/api/Python/Utilities/fmp.py#L456-L468", "sha": "aa6cfbfbf6f19128bcc9135bc75effe38857bd31", "code/function": "def __validate_time_delta(value: str) -> str:\n \n valid_values = TIME_DELTA_VALUES\n if value in valid_values:\n return value\n else:\n logging.error(\n f\"Invalid time_delta value: {value}. Valid options: {valid_values}\"\n )"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "intree_extensions", "code": "def intree_extensions(\n paths: Iterable[str], package_dir: Optional[Dict[str, str]] = None\n) -> List[Pybind11Extension]:\n \"\"\"\n Generate Pybind11Extensions from source files directly located in a Python\n source tree.\n\n ``package_dir`` behaves as in ``setuptools.setup``. 
If unset, the Python\n package root parent is determined as the first parent directory that does\n not contain an ``__init__.py`` file.\n \"\"\"\n exts = []\n\n if package_dir is None:\n for path in paths:\n parent, _ = os.path.split(path)\n while os.path.exists(os.path.join(parent, \"__init__.py\")):\n parent, _ = os.path.split(parent)\n relname, _ = os.path.splitext(os.path.relpath(path, parent))\n qualified_name = relname.replace(os.path.sep, \".\")\n exts.append(Pybind11Extension(qualified_name, [path]))\n return exts\n\n for path in paths:\n for prefix, parent in package_dir.items():\n if path.startswith(parent):\n relname, _ = os.path.splitext(os.path.relpath(path, parent))\n qualified_name = relname.replace(os.path.sep, \".\")\n if prefix:\n qualified_name = prefix + \".\" + qualified_name\n exts.append(Pybind11Extension(qualified_name, [path]))\n break\n else:\n msg = (\n f\"path {path} is not a child of any of the directories listed \"\n f\"in 'package_dir' ({package_dir})\"\n )\n raise ValueError(msg)\n\n return exts", "docstring": "\"\"\"\n Generate Pybind11Extensions from source files directly located in a Python\n source tree.\n\n ``package_dir`` behaves as in ``setuptools.setup``. If unset, the Python\n package root parent is determined as the first parent directory that does\n not contain an ``__init__.py`` file.\n \"\"\"", "url": "https://github.com/TalkUHulk/ai.deploy.box/blob/f937195eab6de38078d1524dae598fd5f142c8c8/python/pybind11/pybind11/setup_helpers.py#L293-L332", "sha": "f937195eab6de38078d1524dae598fd5f142c8c8", "code/function": "def intree_extensions(\n paths: Iterable[str], package_dir: Optional[Dict[str, str]] = None\n) -> List[Pybind11Extension]:\n \n exts = []\n\n if package_dir is None:\n for path in paths:\n parent, _ = os.path.split(path)\n while os.path.exists(os.path.join(parent, \"__init__.py\")):\n parent, _ = os.path.split(parent)\n relname, _ = os.path.splitext(os.path.relpath(path, parent))\n qualified_name = relname.replace(os.path.sep, \".\")\n exts.append(Pybind11Extension(qualified_name, [path]))\n return exts\n\n for path in paths:\n for prefix, parent in package_dir.items():\n if path.startswith(parent):\n relname, _ = os.path.splitext(os.path.relpath(path, parent))\n qualified_name = relname.replace(os.path.sep, \".\")\n if prefix:\n qualified_name = prefix + \".\" + qualified_name\n exts.append(Pybind11Extension(qualified_name, [path]))\n break\n else:\n msg = (\n f\"path {path} is not a child of any of the directories listed \"\n f\"in 'package_dir' ({package_dir})\"\n )\n raise ValueError(msg)\n\n return exts"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "update_sys_path", "code": "def update_sys_path(path_to_add: str) -> None:\n \"\"\"Add given path to `sys.path`.\"\"\"\n if path_to_add not in sys.path and os.path.isdir(path_to_add):\n sys.path.append(path_to_add)", "docstring": "\"\"\"Add given path to `sys.path`.\"\"\"", "url": "https://github.com/microsoft/vscode-mypy/blob/a5cf3e1e33b09dd401190801b5ad32702344540d/bundled/tool/_debug_server.py#L11-L14", "sha": "a5cf3e1e33b09dd401190801b5ad32702344540d", "code/function": "def update_sys_path(path_to_add: str) -> None:\n \n if path_to_add not in sys.path and os.path.isdir(path_to_add):\n sys.path.append(path_to_add)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LoadModel", "code": "def LoadModel(self, request, context):\n \"\"\"\n A gRPC method that loads a model into memory.\n\n Args:\n request: A 
LoadModelRequest object that contains the request parameters.\n context: A grpc.ServicerContext object that provides information about the RPC.\n\n Returns:\n A Result object that contains the result of the LoadModel operation.\n \"\"\"\n model_name = request.Model\n try:\n kwargs = {}\n if request.Type != \"\":\n kwargs['model_type'] = request.Type\n if request.PipelineType != \"\": # Reuse the PipelineType field for language\n kwargs['lang'] = request.PipelineType\n self.model_name = model_name\n self.model = Reranker(model_name, **kwargs) \n except Exception as err:\n return backend_pb2.Result(success=False, message=f\"Unexpected {err=}, {type(err)=}\")\n\n # Implement your logic here for the LoadModel service\n # Replace this with your desired response\n return backend_pb2.Result(message=\"Model loaded successfully\", success=True)", "docstring": "\"\"\"\n A gRPC method that loads a model into memory.\n\n Args:\n request: A LoadModelRequest object that contains the request parameters.\n context: A grpc.ServicerContext object that provides information about the RPC.\n\n Returns:\n A Result object that contains the result of the LoadModel operation.\n \"\"\"", "url": "https://github.com/mudler/LocalAI/blob/e01acc88c984c60b5a3e60bb1e12d4e232a20f6c/backend/python/rerankers/backend.py#L45-L70", "sha": "e01acc88c984c60b5a3e60bb1e12d4e232a20f6c", "code/function": "def LoadModel(self, request, context):\n \n model_name = request.Model\n try:\n kwargs = {}\n if request.Type != \"\":\n kwargs['model_type'] = request.Type\n if request.PipelineType != \"\": # Reuse the PipelineType field for language\n kwargs['lang'] = request.PipelineType\n self.model_name = model_name\n self.model = Reranker(model_name, **kwargs) \n except Exception as err:\n return backend_pb2.Result(success=False, message=f\"Unexpected {err=}, {type(err)=}\")\n\n # Implement your logic here for the LoadModel service\n # Replace this with your desired response\n return backend_pb2.Result(message=\"Model loaded successfully\", success=True)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_frozen_setattrs", "code": "def _frozen_setattrs(self, name, value):\n \"\"\"\n Attached to frozen classes as __setattr__.\n \"\"\"\n if isinstance(self, BaseException) and name in (\n \"__cause__\",\n \"__context__\",\n ):\n BaseException.__setattr__(self, name, value)\n return\n\n raise FrozenInstanceError()", "docstring": "\"\"\"\n Attached to frozen classes as __setattr__.\n \"\"\"", "url": "https://github.com/gd3kr/BlenderGPT/blob/3fbc3bd3f169d904f8bf8a067807c4a71d3d3b4b/lib/attr/_make.py#L587-L598", "sha": "3fbc3bd3f169d904f8bf8a067807c4a71d3d3b4b", "code/function": "def _frozen_setattrs(self, name, value):\n \n if isinstance(self, BaseException) and name in (\n \"__cause__\",\n \"__context__\",\n ):\n BaseException.__setattr__(self, name, value)\n return\n\n raise FrozenInstanceError()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ChatCompletion.create", "code": "@classmethod\n def create(cls, *args, **kwargs):\n \"\"\"\n Creates a new chat completion for the provided messages and parameters.\n\n See https://platform.openai.com/docs/api-reference/chat-completions/create\n for a list of valid parameters.\n \"\"\"\n start = time.time()\n timeout = kwargs.pop(\"timeout\", None)\n\n while True:\n try:\n return super().create(*args, **kwargs)\n except TryAgain as e:\n if timeout is not None and time.time() > start + timeout:\n raise\n\n util.log_info(\"Waiting for 
model to warm up\", error=e)", "docstring": "\"\"\"\n Creates a new chat completion for the provided messages and parameters.\n\n See https://platform.openai.com/docs/api-reference/chat-completions/create\n for a list of valid parameters.\n \"\"\"", "url": "https://github.com/gd3kr/BlenderGPT/blob/3fbc3bd3f169d904f8bf8a067807c4a71d3d3b4b/lib/openai/api_resources/chat_completion.py#L12-L30", "sha": "3fbc3bd3f169d904f8bf8a067807c4a71d3d3b4b", "code/function": "@classmethod\n def create(cls, *args, **kwargs):\n \n start = time.time()\n timeout = kwargs.pop(\"timeout\", None)\n\n while True:\n try:\n return super().create(*args, **kwargs)\n except TryAgain as e:\n if timeout is not None and time.time() > start + timeout:\n raise\n\n util.log_info(\"Waiting for model to warm up\", error=e)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__getattr__", "code": "def __getattr__(self, name: str):\n \"\"\"Forward missing attributes to the wrapped module.\"\"\"\n try:\n return super().__getattr__(name) # defer to nn.Module's logic\n except AttributeError:\n return getattr(self.model, name)", "docstring": "\"\"\"Forward missing attributes to the wrapped module.\"\"\"", "url": "https://github.com/stochasticai/xTuring/blob/570a0d6f971e47d9dde3d8b183c186e2010ba384/src/xturing/engines/lora_engine/lora.py#L399-L404", "sha": "570a0d6f971e47d9dde3d8b183c186e2010ba384", "code/function": "def __getattr__(self, name: str):\n \n try:\n return super().__getattr__(name) # defer to nn.Module's logic\n except AttributeError:\n return getattr(self.model, name)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__init__", "code": "def __init__(self,\n inplanes,\n planes,\n groups=1,\n base_width=4,\n base_channels=64,\n **kwargs):\n \"\"\"Bottleneck block for ResNeXt.\n\n If style is \"pytorch\", the stride-two layer is the 3x3 conv layer, if\n it is \"caffe\", the stride-two layer is the first 1x1 conv layer.\n \"\"\"\n super(Bottleneck, self).__init__(inplanes, planes, **kwargs)\n\n if groups == 1:\n width = self.planes\n else:\n width = math.floor(self.planes *\n (base_width / base_channels)) * groups\n\n self.norm1_name, norm1 = build_norm_layer(\n self.norm_cfg, width, postfix=1)\n self.norm2_name, norm2 = build_norm_layer(\n self.norm_cfg, width, postfix=2)\n self.norm3_name, norm3 = build_norm_layer(\n self.norm_cfg, self.planes * self.expansion, postfix=3)\n\n self.conv1 = build_conv_layer(\n self.conv_cfg,\n self.inplanes,\n width,\n kernel_size=1,\n stride=self.conv1_stride,\n bias=False)\n self.add_module(self.norm1_name, norm1)\n fallback_on_stride = False\n self.with_modulated_dcn = False\n if self.with_dcn:\n fallback_on_stride = self.dcn.pop('fallback_on_stride', False)\n if self.with_sac:\n self.conv2 = build_conv_layer(\n self.sac,\n width,\n width,\n kernel_size=3,\n stride=self.conv2_stride,\n padding=self.dilation,\n dilation=self.dilation,\n groups=groups,\n bias=False)\n elif not self.with_dcn or fallback_on_stride:\n self.conv2 = build_conv_layer(\n self.conv_cfg,\n width,\n width,\n kernel_size=3,\n stride=self.conv2_stride,\n padding=self.dilation,\n dilation=self.dilation,\n groups=groups,\n bias=False)\n else:\n assert self.conv_cfg is None, 'conv_cfg must be None for DCN'\n self.conv2 = build_conv_layer(\n self.dcn,\n width,\n width,\n kernel_size=3,\n stride=self.conv2_stride,\n padding=self.dilation,\n dilation=self.dilation,\n groups=groups,\n bias=False)\n\n self.add_module(self.norm2_name, norm2)\n self.conv3 = 
build_conv_layer(\n self.conv_cfg,\n width,\n self.planes * self.expansion,\n kernel_size=1,\n bias=False)\n self.add_module(self.norm3_name, norm3)", "docstring": "\"\"\"Bottleneck block for ResNeXt.\n\n If style is \"pytorch\", the stride-two layer is the 3x3 conv layer, if\n it is \"caffe\", the stride-two layer is the first 1x1 conv layer.\n \"\"\"", "url": "https://github.com/zhenyuw16/UniDetector/blob/eb182535178ecfad18142bed2e03b458a0a8f451/mmdet/models/backbones/detectors_resnext.py#L14-L95", "sha": "eb182535178ecfad18142bed2e03b458a0a8f451", "code/function": "def __init__(self,\n inplanes,\n planes,\n groups=1,\n base_width=4,\n base_channels=64,\n **kwargs):\n \n super(Bottleneck, self).__init__(inplanes, planes, **kwargs)\n\n if groups == 1:\n width = self.planes\n else:\n width = math.floor(self.planes *\n (base_width / base_channels)) * groups\n\n self.norm1_name, norm1 = build_norm_layer(\n self.norm_cfg, width, postfix=1)\n self.norm2_name, norm2 = build_norm_layer(\n self.norm_cfg, width, postfix=2)\n self.norm3_name, norm3 = build_norm_layer(\n self.norm_cfg, self.planes * self.expansion, postfix=3)\n\n self.conv1 = build_conv_layer(\n self.conv_cfg,\n self.inplanes,\n width,\n kernel_size=1,\n stride=self.conv1_stride,\n bias=False)\n self.add_module(self.norm1_name, norm1)\n fallback_on_stride = False\n self.with_modulated_dcn = False\n if self.with_dcn:\n fallback_on_stride = self.dcn.pop('fallback_on_stride', False)\n if self.with_sac:\n self.conv2 = build_conv_layer(\n self.sac,\n width,\n width,\n kernel_size=3,\n stride=self.conv2_stride,\n padding=self.dilation,\n dilation=self.dilation,\n groups=groups,\n bias=False)\n elif not self.with_dcn or fallback_on_stride:\n self.conv2 = build_conv_layer(\n self.conv_cfg,\n width,\n width,\n kernel_size=3,\n stride=self.conv2_stride,\n padding=self.dilation,\n dilation=self.dilation,\n groups=groups,\n bias=False)\n else:\n assert self.conv_cfg is None, 'conv_cfg must be None for DCN'\n self.conv2 = build_conv_layer(\n self.dcn,\n width,\n width,\n kernel_size=3,\n stride=self.conv2_stride,\n padding=self.dilation,\n dilation=self.dilation,\n groups=groups,\n bias=False)\n\n self.add_module(self.norm2_name, norm2)\n self.conv3 = build_conv_layer(\n self.conv_cfg,\n width,\n self.planes * self.expansion,\n kernel_size=1,\n bias=False)\n self.add_module(self.norm3_name, norm3)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "init_weights", "code": "def init_weights(self):\n \"\"\"Initialize weights of the head.\"\"\"\n bias_init = bias_init_with_prob(0.1)\n self.heatmap_head[-1].bias.data.fill_(bias_init)\n for head in [self.wh_head, self.offset_head]:\n for m in head.modules():\n if isinstance(m, nn.Conv2d):\n normal_init(m, std=0.001)", "docstring": "\"\"\"Initialize weights of the head.\"\"\"", "url": "https://github.com/zhenyuw16/UniDetector/blob/eb182535178ecfad18142bed2e03b458a0a8f451/mmdet/models/dense_heads/centernet_head.py#L72-L79", "sha": "eb182535178ecfad18142bed2e03b458a0a8f451", "code/function": "def init_weights(self):\n \n bias_init = bias_init_with_prob(0.1)\n self.heatmap_head[-1].bias.data.fill_(bias_init)\n for head in [self.wh_head, self.offset_head]:\n for m in head.modules():\n if isinstance(m, nn.Conv2d):\n normal_init(m, std=0.001)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "insert_missing_modules", "code": "def insert_missing_modules(modules: Dict[str, ModuleType], module_name: str) -> None:\n \"\"\"\n Used by 
``import_path`` to create intermediate modules.\n When we want to import a module as \"src.tests.test_foo\" for example, we need\n to create empty modules \"src\" and \"src.tests\" after inserting \"src.tests.test_foo\",\n otherwise \"src.tests.test_foo\" is not importable by ``__import__``.\n\n Based on: https://github.com/pytest-dev/pytest/blob/main/src/_pytest/pathlib.py\n \"\"\"\n import importlib\n\n module_parts = module_name.split(\".\")\n while module_name:\n if module_name not in modules:\n try:\n # If sys.meta_path is empty, calling import_module will issue\n # a warning and raise ModuleNotFoundError. To avoid the\n # warning, we check sys.meta_path explicitly and raise the error\n # ourselves to fall back to creating a dummy module.\n if not sys.meta_path:\n raise ModuleNotFoundError\n importlib.import_module(module_name)\n except ModuleNotFoundError:\n module = ModuleType(\n module_name,\n doc=\"Empty module created by robocorp-tasks.\",\n )\n modules[module_name] = module\n module_parts.pop(-1)\n module_name = \".\".join(module_parts)", "docstring": "\"\"\"\n Used by ``import_path`` to create intermediate modules.\n When we want to import a module as \"src.tests.test_foo\" for example, we need\n to create empty modules \"src\" and \"src.tests\" after inserting \"src.tests.test_foo\",\n otherwise \"src.tests.test_foo\" is not importable by ``__import__``.\n\n Based on: https://github.com/pytest-dev/pytest/blob/main/src/_pytest/pathlib.py\n \"\"\"", "url": "https://github.com/robocorp/robocorp/blob/3df7714109713269f9e6122254bd0d97a55e9f6a/tasks/src/robocorp/tasks/_collect_tasks.py#L37-L66", "sha": "3df7714109713269f9e6122254bd0d97a55e9f6a", "code/function": "def insert_missing_modules(modules: Dict[str, ModuleType], module_name: str) -> None:\n \n import importlib\n\n module_parts = module_name.split(\".\")\n while module_name:\n if module_name not in modules:\n try:\n # If sys.meta_path is empty, calling import_module will issue\n # a warning and raise ModuleNotFoundError. 
To avoid the\n # warning, we check sys.meta_path explicitly and raise the error\n # ourselves to fall back to creating a dummy module.\n if not sys.meta_path:\n raise ModuleNotFoundError\n importlib.import_module(module_name)\n except ModuleNotFoundError:\n module = ModuleType(\n module_name,\n doc=\"Empty module created by robocorp-tasks.\",\n )\n modules[module_name] = module\n module_parts.pop(-1)\n module_name = \".\".join(module_parts)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__init__", "code": "def __init__(self, pattern=None):\n \"\"\"Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationinvokepattern\"\"\"\n self.pattern = pattern", "docstring": "\"\"\"Refer https://docs.microsoft.com/en-us/windows/win32/api/uiautomationclient/nn-uiautomationclient-iuiautomationinvokepattern\"\"\"", "url": "https://github.com/robocorp/robocorp/blob/3df7714109713269f9e6122254bd0d97a55e9f6a/windows/src/robocorp/windows/_vendored/uiautomation/uiautomation.py#L4192-L4194", "sha": "3df7714109713269f9e6122254bd0d97a55e9f6a", "code/function": "def __init__(self, pattern=None):\n \n self.pattern = pattern"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "forward", "code": "def forward(self, x):\n '''\n x: [batch_size, features, k]\n '''\n b, c, h, w = x.shape\n x = x.contiguous().view(b, c, h*w)\n\n if self.smooth:\n mask = self.softmax(x * self.smooth)\n else:\n mask = self.softmax(x)\n output = mask * x\n output = output.contiguous().view(b, c, h, w)\n\n return output", "docstring": "'''\n x: [batch_size, features, k]\n '''", "url": "https://github.com/jiawen-zhu/ViPT/blob/b316fb0cf29a0552f169360556bdc691e43f8452/lib/models/vipt/vit_ce_prompt.py#L33-L47", "sha": "b316fb0cf29a0552f169360556bdc691e43f8452", "code/function": "def forward(self, x):\n \n b, c, h, w = x.shape\n x = x.contiguous().view(b, c, h*w)\n\n if self.smooth:\n mask = self.softmax(x * self.smooth)\n else:\n mask = self.softmax(x)\n output = mask * x\n output = output.contiguous().view(b, c, h, w)\n\n return output"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_march_rays_train.forward", "code": "@staticmethod\n @custom_fwd(cast_inputs=torch.float32)\n def forward(ctx, rays_o, rays_d, bound, contract, density_bitfield, C, H, nears, fars, perturb=False, dt_gamma=0, max_steps=1024):\n ''' march rays to generate points (forward only)\n Args:\n rays_o/d: float, [N, 3]\n bound: float, scalar\n density_bitfield: uint8: [CHHH // 8]\n C: int\n H: int\n nears/fars: float, [N]\n step_counter: int32, (2), used to count the actual number of generated points.\n mean_count: int32, estimated mean steps to accelerate training. (but will randomly drop rays if the actual point count exceeded this threshold.)\n perturb: bool\n align: int, pad output so its size is dividable by align, set to -1 to disable.\n force_all_rays: bool, ignore step_counter and mean_count, always calculate all rays. Useful if rendering the whole image, instead of some rays.\n dt_gamma: float, called cone_angle in instant-ngp, exponentially accelerate ray marching if > 0. (very significant effect, but generally lead to worse performance)\n max_steps: int, max number of sampled points along each ray, also affect min_stepsize.\n Returns:\n xyzs: float, [M, 3], all generated points' coords. 
(all rays concated, need to use `rays` to extract points belonging to each ray)\n dirs: float, [M, 3], all generated points' view dirs.\n ts: float, [M, 2], all generated points' ts.\n rays: int32, [N, 2], all rays' (point_offset, point_count), e.g., xyzs[rays[i, 0]:(rays[i, 0] + rays[i, 1])] --> points belonging to rays[i, 0]\n '''\n\n if not rays_o.is_cuda: rays_o = rays_o.cuda()\n if not rays_d.is_cuda: rays_d = rays_d.cuda()\n if not density_bitfield.is_cuda: density_bitfield = density_bitfield.cuda()\n \n rays_o = rays_o.float().contiguous().view(-1, 3)\n rays_d = rays_d.float().contiguous().view(-1, 3)\n density_bitfield = density_bitfield.contiguous()\n\n N = rays_o.shape[0] # num rays\n \n step_counter = torch.zeros(1, dtype=torch.int32, device=rays_o.device) # point counter, ray counter\n \n if perturb:\n noises = torch.rand(N, dtype=rays_o.dtype, device=rays_o.device)\n else:\n noises = torch.zeros(N, dtype=rays_o.dtype, device=rays_o.device)\n \n # first pass: write rays, get total number of points M to render\n rays = torch.empty(N, 2, dtype=torch.int32, device=rays_o.device) # id, offset, num_steps\n get_backend().march_rays_train(rays_o, rays_d, density_bitfield, bound, contract, dt_gamma, max_steps, N, C, H, nears, fars, None, None, None, rays, step_counter, noises)\n\n # allocate based on M\n M = step_counter.item()\n\n xyzs = torch.zeros(M, 3, dtype=rays_o.dtype, device=rays_o.device)\n dirs = torch.zeros(M, 3, dtype=rays_o.dtype, device=rays_o.device)\n ts = torch.zeros(M, 2, dtype=rays_o.dtype, device=rays_o.device)\n\n # second pass: write outputs\n get_backend().march_rays_train(rays_o, rays_d, density_bitfield, bound, contract, dt_gamma, max_steps, N, C, H, nears, fars, xyzs, dirs, ts, rays, step_counter, noises)\n\n return xyzs, dirs, ts, rays", "docstring": "''' march rays to generate points (forward only)\n Args:\n rays_o/d: float, [N, 3]\n bound: float, scalar\n density_bitfield: uint8: [CHHH // 8]\n C: int\n H: int\n nears/fars: float, [N]\n step_counter: int32, (2), used to count the actual number of generated points.\n mean_count: int32, estimated mean steps to accelerate training. (but will randomly drop rays if the actual point count exceeded this threshold.)\n perturb: bool\n align: int, pad output so its size is dividable by align, set to -1 to disable.\n force_all_rays: bool, ignore step_counter and mean_count, always calculate all rays. Useful if rendering the whole image, instead of some rays.\n dt_gamma: float, called cone_angle in instant-ngp, exponentially accelerate ray marching if > 0. (very significant effect, but generally lead to worse performance)\n max_steps: int, max number of sampled points along each ray, also affect min_stepsize.\n Returns:\n xyzs: float, [M, 3], all generated points' coords. 
(all rays concated, need to use `rays` to extract points belonging to each ray)\n dirs: float, [M, 3], all generated points' view dirs.\n ts: float, [M, 2], all generated points' ts.\n rays: int32, [N, 2], all rays' (point_offset, point_count), e.g., xyzs[rays[i, 0]:(rays[i, 0] + rays[i, 1])] --> points belonging to rays[i, 0]\n '''", "url": "https://github.com/ashawkey/torch-merf/blob/a669be605349c3af5167832f8ead6f69bbf8e697/raymarching/raymarching.py#L195-L251", "sha": "a669be605349c3af5167832f8ead6f69bbf8e697", "code/function": "@staticmethod\n @custom_fwd(cast_inputs=torch.float32)\n def forward(ctx, rays_o, rays_d, bound, contract, density_bitfield, C, H, nears, fars, perturb=False, dt_gamma=0, max_steps=1024):\n \n\n if not rays_o.is_cuda: rays_o = rays_o.cuda()\n if not rays_d.is_cuda: rays_d = rays_d.cuda()\n if not density_bitfield.is_cuda: density_bitfield = density_bitfield.cuda()\n \n rays_o = rays_o.float().contiguous().view(-1, 3)\n rays_d = rays_d.float().contiguous().view(-1, 3)\n density_bitfield = density_bitfield.contiguous()\n\n N = rays_o.shape[0] # num rays\n \n step_counter = torch.zeros(1, dtype=torch.int32, device=rays_o.device) # point counter, ray counter\n \n if perturb:\n noises = torch.rand(N, dtype=rays_o.dtype, device=rays_o.device)\n else:\n noises = torch.zeros(N, dtype=rays_o.dtype, device=rays_o.device)\n \n # first pass: write rays, get total number of points M to render\n rays = torch.empty(N, 2, dtype=torch.int32, device=rays_o.device) # id, offset, num_steps\n get_backend().march_rays_train(rays_o, rays_d, density_bitfield, bound, contract, dt_gamma, max_steps, N, C, H, nears, fars, None, None, None, rays, step_counter, noises)\n\n # allocate based on M\n M = step_counter.item()\n\n xyzs = torch.zeros(M, 3, dtype=rays_o.dtype, device=rays_o.device)\n dirs = torch.zeros(M, 3, dtype=rays_o.dtype, device=rays_o.device)\n ts = torch.zeros(M, 2, dtype=rays_o.dtype, device=rays_o.device)\n\n # second pass: write outputs\n get_backend().march_rays_train(rays_o, rays_d, density_bitfield, bound, contract, dt_gamma, max_steps, N, C, H, nears, fars, xyzs, dirs, ts, rays, step_counter, noises)\n\n return xyzs, dirs, ts, rays"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Collection._save_index_to_disk", "code": "@classmethod\n def _save_index_to_disk(cls, name: str, hnsw_ix : dbHnswIndexConfig) -> Tuple[str, str, int]:\n \"\"\"\n Save the current index to disk and return the filename, md5sum, and count of items in the index\n \"\"\"\n count = len(hnsw_ix.get_ids_list())\n filename = f\"index_{name}.hnsw\"\n try:\n os.unlink(filename)\n except:\n pass\n hnsw_ix.save_index(filename)\n md5sum = md5_file(filename)\n logger.info(f\"saved index to {filename} with md5sum {md5sum} and {count} items\")\n return filename, md5sum, count", "docstring": "\"\"\"\n Save the current index to disk and return the filename, md5sum, and count of items in the index\n \"\"\"", "url": "https://github.com/jiggy-ai/hnsqlite/blob/9824e6c73508d844ea3424ba9f3033da46b9de9f/hnsqlite/collection.py#L255-L269", "sha": "9824e6c73508d844ea3424ba9f3033da46b9de9f", "code/function": "@classmethod\n def _save_index_to_disk(cls, name: str, hnsw_ix : dbHnswIndexConfig) -> Tuple[str, str, int]:\n \n count = len(hnsw_ix.get_ids_list())\n filename = f\"index_{name}.hnsw\"\n try:\n os.unlink(filename)\n except:\n pass\n hnsw_ix.save_index(filename)\n md5sum = md5_file(filename)\n logger.info(f\"saved index to {filename} with md5sum {md5sum} and {count} items\")\n 
return filename, md5sum, count"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "apply_jigsaw", "code": "def apply_jigsaw(arr, destinations):\n \"\"\"Move cells of an image similar to a jigsaw puzzle.\n\n This function will split the image into ``rows x cols`` cells and\n move each cell to the target index given in `destinations`.\n\n Added in 0.4.0.\n\n **Supported dtypes**:\n\n * ``uint8``: yes; fully tested\n * ``uint16``: yes; fully tested\n * ``uint32``: yes; fully tested\n * ``uint64``: yes; fully tested\n * ``int8``: yes; fully tested\n * ``int16``: yes; fully tested\n * ``int32``: yes; fully tested\n * ``int64``: yes; fully tested\n * ``float16``: yes; fully tested\n * ``float32``: yes; fully tested\n * ``float64``: yes; fully tested\n * ``float128``: yes; fully tested\n * ``bool``: yes; fully tested\n\n Parameters\n ----------\n arr : ndarray\n Array with at least two dimensions denoting height and width.\n\n destinations : ndarray\n 2-dimensional array containing for each cell the id of the destination\n cell. The order is expected to a flattened c-order, i.e. row by row.\n The height of the image must be evenly divisible by the number of\n rows in this array. Analogous for the width and columns.\n\n Returns\n -------\n ndarray\n Modified image with cells moved according to `destinations`.\n\n \"\"\"\n # pylint complains about unravel_index() here\n # pylint: disable=unbalanced-tuple-unpacking\n\n nb_rows, nb_cols = destinations.shape[0:2]\n\n assert arr.ndim >= 2, (\n \"Expected array with at least two dimensions, but got %d with \"\n \"shape %s.\" % (arr.ndim, arr.shape))\n assert (arr.shape[0] % nb_rows) == 0, (\n \"Expected image height to by divisible by number of rows, but got \"\n \"height %d and %d rows. Use cropping or padding to modify the image \"\n \"height or change the number of rows.\" % (arr.shape[0], nb_rows)\n )\n assert (arr.shape[1] % nb_cols) == 0, (\n \"Expected image width to by divisible by number of columns, but got \"\n \"width %d and %d columns. 
Use cropping or padding to modify the image \"\n \"width or change the number of columns.\" % (arr.shape[1], nb_cols)\n )\n\n cell_height = arr.shape[0] // nb_rows\n cell_width = arr.shape[1] // nb_cols\n\n dest_rows, dest_cols = np.unravel_index(\n destinations.flatten(), (nb_rows, nb_cols))\n\n result = np.zeros_like(arr)\n i = 0\n for source_row in np.arange(nb_rows):\n for source_col in np.arange(nb_cols):\n # TODO vectorize coords computation\n dest_row, dest_col = dest_rows[i], dest_cols[i]\n\n source_y1 = source_row * cell_height\n source_y2 = source_y1 + cell_height\n source_x1 = source_col * cell_width\n source_x2 = source_x1 + cell_width\n\n dest_y1 = dest_row * cell_height\n dest_y2 = dest_y1 + cell_height\n dest_x1 = dest_col * cell_width\n dest_x2 = dest_x1 + cell_width\n\n source = arr[source_y1:source_y2, source_x1:source_x2]\n result[dest_y1:dest_y2, dest_x1:dest_x2] = source\n\n i += 1\n\n return result", "docstring": "\"\"\"Move cells of an image similar to a jigsaw puzzle.\n\n This function will split the image into ``rows x cols`` cells and\n move each cell to the target index given in `destinations`.\n\n Added in 0.4.0.\n\n **Supported dtypes**:\n\n * ``uint8``: yes; fully tested\n * ``uint16``: yes; fully tested\n * ``uint32``: yes; fully tested\n * ``uint64``: yes; fully tested\n * ``int8``: yes; fully tested\n * ``int16``: yes; fully tested\n * ``int32``: yes; fully tested\n * ``int64``: yes; fully tested\n * ``float16``: yes; fully tested\n * ``float32``: yes; fully tested\n * ``float64``: yes; fully tested\n * ``float128``: yes; fully tested\n * ``bool``: yes; fully tested\n\n Parameters\n ----------\n arr : ndarray\n Array with at least two dimensions denoting height and width.\n\n destinations : ndarray\n 2-dimensional array containing for each cell the id of the destination\n cell. The order is expected to a flattened c-order, i.e. row by row.\n The height of the image must be evenly divisible by the number of\n rows in this array. Analogous for the width and columns.\n\n Returns\n -------\n ndarray\n Modified image with cells moved according to `destinations`.\n\n \"\"\"", "url": "https://github.com/thu-ml/3D_Corruptions_AD/blob/48c23f77fe82beab599f8248b7794928334a3fb5/OpenPCDet/OpenPCDet/pcdet/datasets/kitti/utils/imgaug/augmenters/geometric.py#L386-L474", "sha": "48c23f77fe82beab599f8248b7794928334a3fb5", "code/function": "def apply_jigsaw(arr, destinations):\n \n # pylint complains about unravel_index() here\n # pylint: disable=unbalanced-tuple-unpacking\n\n nb_rows, nb_cols = destinations.shape[0:2]\n\n assert arr.ndim >= 2, (\n \"Expected array with at least two dimensions, but got %d with \"\n \"shape %s.\" % (arr.ndim, arr.shape))\n assert (arr.shape[0] % nb_rows) == 0, (\n \"Expected image height to by divisible by number of rows, but got \"\n \"height %d and %d rows. Use cropping or padding to modify the image \"\n \"height or change the number of rows.\" % (arr.shape[0], nb_rows)\n )\n assert (arr.shape[1] % nb_cols) == 0, (\n \"Expected image width to by divisible by number of columns, but got \"\n \"width %d and %d columns. 
Use cropping or padding to modify the image \"\n \"width or change the number of columns.\" % (arr.shape[1], nb_cols)\n )\n\n cell_height = arr.shape[0] // nb_rows\n cell_width = arr.shape[1] // nb_cols\n\n dest_rows, dest_cols = np.unravel_index(\n destinations.flatten(), (nb_rows, nb_cols))\n\n result = np.zeros_like(arr)\n i = 0\n for source_row in np.arange(nb_rows):\n for source_col in np.arange(nb_cols):\n # TODO vectorize coords computation\n dest_row, dest_col = dest_rows[i], dest_cols[i]\n\n source_y1 = source_row * cell_height\n source_y2 = source_y1 + cell_height\n source_x1 = source_col * cell_width\n source_x2 = source_x1 + cell_width\n\n dest_y1 = dest_row * cell_height\n dest_y2 = dest_y1 + cell_height\n dest_x1 = dest_col * cell_width\n dest_x2 = dest_x1 + cell_width\n\n source = arr[source_y1:source_y2, source_x1:source_x2]\n result[dest_y1:dest_y2, dest_x1:dest_x2] = source\n\n i += 1\n\n return result"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "remove_out_of_image_", "code": "def remove_out_of_image_(self, fully=True, partly=False):\n \"\"\"\n Remove all LS that are fully/partially outside of an image in-place.\n\n Added in 0.4.0.\n\n Parameters\n ----------\n fully : bool, optional\n Whether to remove line strings that are fully outside of the image.\n\n partly : bool, optional\n Whether to remove line strings that are partially outside of the\n image.\n\n Returns\n -------\n imgaug.augmentables.lines.LineStringsOnImage\n Reduced set of line strings. Those that are fully/partially\n outside of the given image plane are removed.\n The object and its items may have been modified in-place.\n\n \"\"\"\n self.line_strings = [\n ls for ls in self.line_strings\n if not ls.is_out_of_image(self.shape, fully=fully, partly=partly)]\n return self", "docstring": "\"\"\"\n Remove all LS that are fully/partially outside of an image in-place.\n\n Added in 0.4.0.\n\n Parameters\n ----------\n fully : bool, optional\n Whether to remove line strings that are fully outside of the image.\n\n partly : bool, optional\n Whether to remove line strings that are partially outside of the\n image.\n\n Returns\n -------\n imgaug.augmentables.lines.LineStringsOnImage\n Reduced set of line strings. Those that are fully/partially\n outside of the given image plane are removed.\n The object and its items may have been modified in-place.\n\n \"\"\"", "url": "https://github.com/thu-ml/3D_Corruptions_AD/blob/48c23f77fe82beab599f8248b7794928334a3fb5/utils/imgaug/augmentables/lines.py#L1928-L1954", "sha": "48c23f77fe82beab599f8248b7794928334a3fb5", "code/function": "def remove_out_of_image_(self, fully=True, partly=False):\n \n self.line_strings = [\n ls for ls in self.line_strings\n if not ls.is_out_of_image(self.shape, fully=fully, partly=partly)]\n return self"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "show_result", "code": "def show_result(self,\n img,\n result,\n score_thr=0.3,\n bbox_color=(72, 101, 241),\n text_color=(72, 101, 241),\n mask_color=None,\n thickness=2,\n font_size=13,\n win_name='',\n show=False,\n wait_time=0,\n out_file=None):\n \"\"\"Draw `result` over `img`.\n\n Args:\n img (str or Tensor): The image to be displayed.\n result (dict): The results.\n\n score_thr (float, optional): Minimum score of bboxes to be shown.\n Default: 0.3.\n bbox_color (str or tuple(int) or :obj:`Color`):Color of bbox lines.\n The tuple of color should be in BGR order. 
Default: 'green'.\n text_color (str or tuple(int) or :obj:`Color`):Color of texts.\n The tuple of color should be in BGR order. Default: 'green'.\n mask_color (None or str or tuple(int) or :obj:`Color`):\n Color of masks. The tuple of color should be in BGR order.\n Default: None.\n thickness (int): Thickness of lines. Default: 2.\n font_size (int): Font size of texts. Default: 13.\n win_name (str): The window name. Default: ''.\n wait_time (float): Value of waitKey param.\n Default: 0.\n show (bool): Whether to show the image.\n Default: False.\n out_file (str or None): The filename to write the image.\n Default: None.\n\n Returns:\n img (Tensor): Only if not `show` or `out_file`.\n \"\"\"\n img = mmcv.imread(img)\n img = img.copy()\n pan_results = result['pan_results']\n # keep objects ahead\n ids = np.unique(pan_results)[::-1]\n legal_indices = ids != self.num_classes # for VOID label\n ids = ids[legal_indices]\n labels = np.array([id % INSTANCE_OFFSET for id in ids], dtype=np.int64)\n segms = (pan_results[None] == ids[:, None, None])\n\n # if out_file specified, do not show image in window\n if out_file is not None:\n show = False\n # draw bounding boxes\n img = imshow_det_bboxes(\n img,\n segms=segms,\n labels=labels,\n class_names=self.CLASSES,\n bbox_color=bbox_color,\n text_color=text_color,\n mask_color=mask_color,\n thickness=thickness,\n font_size=font_size,\n win_name=win_name,\n show=show,\n wait_time=wait_time,\n out_file=out_file)\n\n if not (show or out_file):\n return img", "docstring": "\"\"\"Draw `result` over `img`.\n\n Args:\n img (str or Tensor): The image to be displayed.\n result (dict): The results.\n\n score_thr (float, optional): Minimum score of bboxes to be shown.\n Default: 0.3.\n bbox_color (str or tuple(int) or :obj:`Color`):Color of bbox lines.\n The tuple of color should be in BGR order. Default: 'green'.\n text_color (str or tuple(int) or :obj:`Color`):Color of texts.\n The tuple of color should be in BGR order. Default: 'green'.\n mask_color (None or str or tuple(int) or :obj:`Color`):\n Color of masks. The tuple of color should be in BGR order.\n Default: None.\n thickness (int): Thickness of lines. Default: 2.\n font_size (int): Font size of texts. Default: 13.\n win_name (str): The window name. 
Default: ''.\n wait_time (float): Value of waitKey param.\n Default: 0.\n show (bool): Whether to show the image.\n Default: False.\n out_file (str or None): The filename to write the image.\n Default: None.\n\n Returns:\n img (Tensor): Only if not `show` or `out_file`.\n \"\"\"", "url": "https://github.com/Cuogeihong/CEASC/blob/2abfd1a99f1b0fe1ed3d51588b64549e1584da50/mmdet/models/detectors/panoptic_two_stage_segmentor.py#L208-L279", "sha": "2abfd1a99f1b0fe1ed3d51588b64549e1584da50", "code/function": "def show_result(self,\n img,\n result,\n score_thr=0.3,\n bbox_color=(72, 101, 241),\n text_color=(72, 101, 241),\n mask_color=None,\n thickness=2,\n font_size=13,\n win_name='',\n show=False,\n wait_time=0,\n out_file=None):\n \n img = mmcv.imread(img)\n img = img.copy()\n pan_results = result['pan_results']\n # keep objects ahead\n ids = np.unique(pan_results)[::-1]\n legal_indices = ids != self.num_classes # for VOID label\n ids = ids[legal_indices]\n labels = np.array([id % INSTANCE_OFFSET for id in ids], dtype=np.int64)\n segms = (pan_results[None] == ids[:, None, None])\n\n # if out_file specified, do not show image in window\n if out_file is not None:\n show = False\n # draw bounding boxes\n img = imshow_det_bboxes(\n img,\n segms=segms,\n labels=labels,\n class_names=self.CLASSES,\n bbox_color=bbox_color,\n text_color=text_color,\n mask_color=mask_color,\n thickness=thickness,\n font_size=font_size,\n win_name=win_name,\n show=show,\n wait_time=wait_time,\n out_file=out_file)\n\n if not (show or out_file):\n return img"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "forward_train", "code": "def forward_train(self, gt_masks=None, gt_semantic_seg=None, **kwargs):\n \"\"\"HeuristicFusionHead has no training loss.\"\"\"\n return dict()", "docstring": "\"\"\"HeuristicFusionHead has no training loss.\"\"\"", "url": "https://github.com/Cuogeihong/CEASC/blob/2abfd1a99f1b0fe1ed3d51588b64549e1584da50/mmdet/models/seg_heads/panoptic_fusion_heads/heuristic_fusion_head.py#L23-L25", "sha": "2abfd1a99f1b0fe1ed3d51588b64549e1584da50", "code/function": "def forward_train(self, gt_masks=None, gt_semantic_seg=None, **kwargs):\n \n return dict()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EvictionBase", "code": "def EvictionBase(name: str, **kwargs):\n \"\"\"Generate specific CacheStorage with the configuration.\n\n :param name: the name of the eviction, like: memory\n :type name: str\n\n :param policy: eviction strategy\n :type policy: str\n :param maxsize: the maxsize of cache data\n :type maxsize: int\n :param clean_size: will clean the size of data when the size of cache data reaches the max size\n :type clean_size: int\n :param on_evict: the function for cleaning the data in the store\n :type on_evict: Callable[[List[Any]], None]\n\n Example:\n .. 
code-block:: python\n\n from gptcache.manager import EvictionBase\n\n cache_base = EvictionBase('memory', policy='lru', maxsize=10, clean_size=2, on_evict=lambda x: print(x))\n \"\"\"\n return eviction_manager.EvictionBase.get(name, **kwargs)", "docstring": "\"\"\"Generate specific CacheStorage with the configuration.\n\n :param name: the name of the eviction, like: memory\n :type name: str\n\n :param policy: eviction strategy\n :type policy: str\n :param maxsize: the maxsize of cache data\n :type maxsize: int\n :param clean_size: will clean the size of data when the size of cache data reaches the max size\n :type clean_size: int\n :param on_evict: the function for cleaning the data in the store\n :type on_evict: Callable[[List[Any]], None]\n\n Example:\n .. code-block:: python\n\n from gptcache.manager import EvictionBase\n\n cache_base = EvictionBase('memory', policy='lru', maxsize=10, clean_size=2, on_evict=lambda x: print(x))\n \"\"\"", "url": "https://github.com/zilliztech/GPTCache/blob/48f8e768d7dcd7f66d948ad07914a630a382b45b/gptcache/manager/eviction/__init__.py#L10-L32", "sha": "48f8e768d7dcd7f66d948ad07914a630a382b45b", "code/function": "def EvictionBase(name: str, **kwargs):\n \n return eviction_manager.EvictionBase.get(name, **kwargs)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "write", "code": "def write(self, branches=True) -> None:\n \"\"\"\n Writes to the conversation log.\n \"\"\"\n # create directory if it doesn't exist\n Path(self.logfile).parent.mkdir(parents=True, exist_ok=True)\n\n # write current branch\n self.log.write_jsonl(self.logfile)\n\n # write other branches\n # FIXME: wont write main branch if on a different branch\n if branches:\n branches_dir = self.logdir / \"branches\"\n branches_dir.mkdir(parents=True, exist_ok=True)\n for branch, log in self._branches.items():\n if branch == \"main\":\n continue\n branch_path = branches_dir / f\"{branch}.jsonl\"\n log.write_jsonl(branch_path)", "docstring": "\"\"\"\n Writes to the conversation log.\n \"\"\"", "url": "https://github.com/ErikBjare/gptme/blob/ebc076bb75a3af2eafbe498634abb032772f11df/gptme/logmanager.py#L167-L186", "sha": "ebc076bb75a3af2eafbe498634abb032772f11df", "code/function": "def write(self, branches=True) -> None:\n \n # create directory if it doesn't exist\n Path(self.logfile).parent.mkdir(parents=True, exist_ok=True)\n\n # write current branch\n self.log.write_jsonl(self.logfile)\n\n # write other branches\n # FIXME: wont write main branch if on a different branch\n if branches:\n branches_dir = self.logdir / \"branches\"\n branches_dir.mkdir(parents=True, exist_ok=True)\n for branch, log in self._branches.items():\n if branch == \"main\":\n continue\n branch_path = branches_dir / f\"{branch}.jsonl\"\n log.write_jsonl(branch_path)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "set_api_key", "code": "def set_api_key(self, key: str):\n \"\"\"\n Set the API key for authentication.\n\n Args:\n key (str): The API key string.\n \"\"\"\n self.api_key = key", "docstring": "\"\"\"\n Set the API key for authentication.\n\n Args:\n key (str): The API key string.\n \"\"\"", "url": "https://github.com/derronqi/yolov8-face/blob/18f9fde9862ecee74a28e56a8f09bbfc3bcff6d4/ultralytics/hub/auth.py#L132-L139", "sha": "18f9fde9862ecee74a28e56a8f09bbfc3bcff6d4", "code/function": "def set_api_key(self, key: str):\n \n self.api_key = key"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__init__", "code": 
"def __init__(self, c1, c2, num_heads, num_layers):\n \"\"\"Initialize a Transformer module with position embedding and specified number of heads and layers.\"\"\"\n super().__init__()\n self.conv = None\n if c1 != c2:\n self.conv = Conv(c1, c2)\n self.linear = nn.Linear(c2, c2) # learnable position embedding\n self.tr = nn.Sequential(*(TransformerLayer(c2, num_heads) for _ in range(num_layers)))\n self.c2 = c2", "docstring": "\"\"\"Initialize a Transformer module with position embedding and specified number of heads and layers.\"\"\"", "url": "https://github.com/derronqi/yolov8-face/blob/18f9fde9862ecee74a28e56a8f09bbfc3bcff6d4/ultralytics/nn/modules/transformer.py#L123-L131", "sha": "18f9fde9862ecee74a28e56a8f09bbfc3bcff6d4", "code/function": "def __init__(self, c1, c2, num_heads, num_layers):\n \n super().__init__()\n self.conv = None\n if c1 != c2:\n self.conv = Conv(c1, c2)\n self.linear = nn.Linear(c2, c2) # learnable position embedding\n self.tr = nn.Sequential(*(TransformerLayer(c2, num_heads) for _ in range(num_layers)))\n self.c2 = c2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "makedir_exist_ok", "code": "def makedir_exist_ok(dirpath):\n \"\"\"\n Python2 support for os.makedirs(.., exist_ok=True)\n \"\"\"\n try:\n os.makedirs(dirpath)\n except OSError as e:\n if e.errno == errno.EEXIST:\n pass\n else:\n raise", "docstring": "\"\"\"\n Python2 support for os.makedirs(.., exist_ok=True)\n \"\"\"", "url": "https://github.com/Xiuyu-Li/q-diffusion/blob/715783da70baa267321d6700ceb8941400c309d1/ddim/datasets/utils.py#L36-L46", "sha": "715783da70baa267321d6700ceb8941400c309d1", "code/function": "def makedir_exist_ok(dirpath):\n \n try:\n os.makedirs(dirpath)\n except OSError as e:\n if e.errno == errno.EEXIST:\n pass\n else:\n raise"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__init__", "code": "def __init__(\n self,\n dataset_name,\n tasks=None,\n distributed=True,\n output_dir=None,\n *,\n max_dets_per_image=None,\n ):\n \"\"\"\n Args:\n dataset_name (str): name of the dataset to be evaluated.\n It must have the following corresponding metadata:\n \"json_file\": the path to the LVIS format annotation\n tasks (tuple[str]): tasks that can be evaluated under the given\n configuration. 
A task is one of \"bbox\", \"segm\".\n By default, will infer this automatically from predictions.\n distributed (True): if True, will collect results from all ranks for evaluation.\n Otherwise, will evaluate the results in the current process.\n output_dir (str): optional, an output directory to dump results.\n max_dets_per_image (None or int): limit on maximum detections per image in evaluating AP\n This limit, by default of the LVIS dataset, is 300.\n \"\"\"\n from lvis import LVIS\n\n self._logger = logging.getLogger(__name__)\n\n if tasks is not None and isinstance(tasks, CfgNode):\n self._logger.warn(\n \"COCO Evaluator instantiated using config, this is deprecated behavior.\"\n \" Please pass in explicit arguments instead.\"\n )\n self._tasks = None # Infering it from predictions should be better\n else:\n self._tasks = tasks\n\n self._distributed = distributed\n self._output_dir = output_dir\n self._max_dets_per_image = max_dets_per_image\n\n self._cpu_device = torch.device(\"cpu\")\n\n self._metadata = MetadataCatalog.get(dataset_name)\n json_file = PathManager.get_local_path(self._metadata.json_file)\n self._lvis_api = LVIS(json_file)\n # Test set json files do not contain annotations (evaluation must be\n # performed using the LVIS evaluation server).\n self._do_evaluation = len(self._lvis_api.get_ann_ids()) > 0\n file_path = \"/home/ubuntu/efs/Detic/output/Detic/Detic_LI_CLIP_R5021k_640b64_4x_ft4x_max-size/inference_lvis_v1_val/instances_predictions.pth\"\n with PathManager.open(file_path, \"rb\") as f:\n self._predictions = torch.load(f)", "docstring": "\"\"\"\n Args:\n dataset_name (str): name of the dataset to be evaluated.\n It must have the following corresponding metadata:\n \"json_file\": the path to the LVIS format annotation\n tasks (tuple[str]): tasks that can be evaluated under the given\n configuration. 
A task is one of \"bbox\", \"segm\".\n By default, will infer this automatically from predictions.\n distributed (True): if True, will collect results from all ranks for evaluation.\n Otherwise, will evaluate the results in the current process.\n output_dir (str): optional, an output directory to dump results.\n max_dets_per_image (None or int): limit on maximum detections per image in evaluating AP\n This limit, by default of the LVIS dataset, is 300.\n \"\"\"", "url": "https://github.com/amazon-science/prompt-pretraining/blob/24bca56b21b4fab1d493c8758c31fd6d1c40bb96/third_party/Detic/cherry_pick.py#L627-L677", "sha": "24bca56b21b4fab1d493c8758c31fd6d1c40bb96", "code/function": "def __init__(\n self,\n dataset_name,\n tasks=None,\n distributed=True,\n output_dir=None,\n *,\n max_dets_per_image=None,\n ):\n \n from lvis import LVIS\n\n self._logger = logging.getLogger(__name__)\n\n if tasks is not None and isinstance(tasks, CfgNode):\n self._logger.warn(\n \"COCO Evaluator instantiated using config, this is deprecated behavior.\"\n \" Please pass in explicit arguments instead.\"\n )\n self._tasks = None # Infering it from predictions should be better\n else:\n self._tasks = tasks\n\n self._distributed = distributed\n self._output_dir = output_dir\n self._max_dets_per_image = max_dets_per_image\n\n self._cpu_device = torch.device(\"cpu\")\n\n self._metadata = MetadataCatalog.get(dataset_name)\n json_file = PathManager.get_local_path(self._metadata.json_file)\n self._lvis_api = LVIS(json_file)\n # Test set json files do not contain annotations (evaluation must be\n # performed using the LVIS evaluation server).\n self._do_evaluation = len(self._lvis_api.get_ann_ids()) > 0\n file_path = \"/home/ubuntu/efs/Detic/output/Detic/Detic_LI_CLIP_R5021k_640b64_4x_ft4x_max-size/inference_lvis_v1_val/instances_predictions.pth\"\n with PathManager.open(file_path, \"rb\") as f:\n self._predictions = torch.load(f)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "gen_single_level_base_anchors", "code": "def gen_single_level_base_anchors(self, base_sizes_per_level, center=None):\n \"\"\"Generate base anchors of a single level.\n\n Args:\n base_sizes_per_level (list[tuple[int, int]]): Basic sizes of\n anchors.\n center (tuple[float], optional): The center of the base anchor\n related to a single feature grid. Defaults to None.\n\n Returns:\n torch.Tensor: Anchors in a single-level feature maps.\n \"\"\"\n x_center, y_center = center\n base_anchors = []\n for base_size in base_sizes_per_level:\n w, h = base_size\n\n # use float anchor and the anchor's center is aligned with the\n # pixel center\n base_anchor = torch.Tensor([\n x_center - 0.5 * w, y_center - 0.5 * h, x_center + 0.5 * w,\n y_center + 0.5 * h\n ])\n base_anchors.append(base_anchor)\n base_anchors = torch.stack(base_anchors, dim=0)\n\n return base_anchors", "docstring": "\"\"\"Generate base anchors of a single level.\n\n Args:\n base_sizes_per_level (list[tuple[int, int]]): Basic sizes of\n anchors.\n center (tuple[float], optional): The center of the base anchor\n related to a single feature grid. 
Defaults to None.\n\n Returns:\n torch.Tensor: Anchors in a single-level feature maps.\n \"\"\"", "url": "https://github.com/MingXiangL/AttentionShift/blob/dc3b87d35d2334d8675cb899ead2c02d74c163c1/mmdet/core/anchor/anchor_generator.py#L639-L665", "sha": "dc3b87d35d2334d8675cb899ead2c02d74c163c1", "code/function": "def gen_single_level_base_anchors(self, base_sizes_per_level, center=None):\n \n x_center, y_center = center\n base_anchors = []\n for base_size in base_sizes_per_level:\n w, h = base_size\n\n # use float anchor and the anchor's center is aligned with the\n # pixel center\n base_anchor = torch.Tensor([\n x_center - 0.5 * w, y_center - 0.5 * h, x_center + 0.5 * w,\n y_center + 0.5 * h\n ])\n base_anchors.append(base_anchor)\n base_anchors = torch.stack(base_anchors, dim=0)\n\n return base_anchors"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "uuid8", "code": "def uuid8(bytes):\n \"\"\"Generate a custom UUID comprised almost entirely of user-supplied bytes..\"\"\"\n return UUID(bytes=uuid_utils.uuid8(bytes).bytes)", "docstring": "\"\"\"Generate a custom UUID comprised almost entirely of user-supplied bytes..\"\"\"", "url": "https://github.com/aminalaee/uuid-utils/blob/9ddd132c46278ac8aeb70474e688acec3465ce30/python/uuid_utils/compat/__init__.py#L75-L77", "sha": "9ddd132c46278ac8aeb70474e688acec3465ce30", "code/function": "def uuid8(bytes):\n \n return UUID(bytes=uuid_utils.uuid8(bytes).bytes)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__init__", "code": "def __init__(self, calibrator_name=None, **kwargs):\n '''\n Arguments:\n calibrator_name.\n Default: if set to None it will be the same as the class name.\n Please be reminded that if in the model there are many calibrators\n of the same type the calibrator_name should be changed to avoid confusion.\n '''\n self._calibrated = False\n if calibrator_name is None:\n calibrator_name = twml.util.to_snake_case(self.__class__.__name__)\n self._calibrator_name = calibrator_name\n self._kwargs = kwargs", "docstring": "'''\n Arguments:\n calibrator_name.\n Default: if set to None it will be the same as the class name.\n Please be reminded that if in the model there are many calibrators\n of the same type the calibrator_name should be changed to avoid confusion.\n '''", "url": "https://github.com/twitter/the-algorithm/blob/72eda9a24f815f6d566818cbf8518138e29d83e9/twml/twml/contrib/calibrators/calibrator.py#L62-L74", "sha": "72eda9a24f815f6d566818cbf8518138e29d83e9", "code/function": "def __init__(self, calibrator_name=None, **kwargs):\n \n self._calibrated = False\n if calibrator_name is None:\n calibrator_name = twml.util.to_snake_case(self.__class__.__name__)\n self._calibrator_name = calibrator_name\n self._kwargs = kwargs"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_absolute_position_to_relative_position", "code": "def _absolute_position_to_relative_position(self, x):\n \"\"\"\n x: [b, h, l, l]\n ret: [b, h, l, 2*l-1]\n \"\"\"\n batch, heads, length, _ = x.size()\n # padd along column\n x = F.pad(\n x,\n [0, length - 1, 0, 0, 0, 0, 0, 0],\n )\n x_flat = x.view([batch, heads, length*length + length * (length - 1)])\n # add 0's in the beginning that will skew the elements after reshape\n x_flat = F.pad(\n x_flat,\n [length, 0, 0, 0, 0, 0],\n )\n x_final = x_flat.view([batch, heads, length, 2 * length])[:, :, :, 1:]\n return x_final", "docstring": "\"\"\"\n x: [b, h, l, l]\n ret: [b, h, l, 2*l-1]\n \"\"\"", "url": 
"https://github.com/RVC-Project/Retrieval-based-Voice-Conversion-WebUI/blob/7ef19867780cf703841ebafb565a4e47d1ea86ff/infer/lib/infer_pack/attentions_onnx.py#L356-L374", "sha": "7ef19867780cf703841ebafb565a4e47d1ea86ff", "code/function": "def _absolute_position_to_relative_position(self, x):\n \n batch, heads, length, _ = x.size()\n # padd along column\n x = F.pad(\n x,\n [0, length - 1, 0, 0, 0, 0, 0, 0],\n )\n x_flat = x.view([batch, heads, length*length + length * (length - 1)])\n # add 0's in the beginning that will skew the elements after reshape\n x_flat = F.pad(\n x_flat,\n [length, 0, 0, 0, 0, 0],\n )\n x_final = x_flat.view([batch, heads, length, 2 * length])[:, :, :, 1:]\n return x_final"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SolutionManager.create", "code": "@staticmethod\n def create(solver_factory: SolverFactory[Solution_] | SolverManager[Solution_, Any]) -> \\\n 'SolutionManager[Solution_]':\n \"\"\"\n Uses a `SolverFactory` or `SolverManager` to build a SolutionManager.\n\n Parameters\n ----------\n solver_factory : SolverFactory | SolverManager\n\n Returns\n -------\n SolutionManager\n A `SolutionManager` instance.\n \"\"\"\n from ai.timefold.solver.core.api.solver import SolutionManager as JavaSolutionManager\n return SolutionManager(JavaSolutionManager.create(solver_factory._delegate))", "docstring": "\"\"\"\n Uses a `SolverFactory` or `SolverManager` to build a SolutionManager.\n\n Parameters\n ----------\n solver_factory : SolverFactory | SolverManager\n\n Returns\n -------\n SolutionManager\n A `SolutionManager` instance.\n \"\"\"", "url": "https://github.com/TimefoldAI/timefold-solver/blob/f67c507a421ee113dd2e76f825480aa058b14767/python/python-core/src/main/python/_solution_manager.py#L29-L45", "sha": "f67c507a421ee113dd2e76f825480aa058b14767", "code/function": "@staticmethod\n def create(solver_factory: SolverFactory[Solution_] | SolverManager[Solution_, Any]) -> \\\n 'SolutionManager[Solution_]':\n \n from ai.timefold.solver.core.api.solver import SolutionManager as JavaSolutionManager\n return SolutionManager(JavaSolutionManager.create(solver_factory._delegate))"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "diff", "code": "def diff(self, other: 'ScoreAnalysis') -> 'ScoreAnalysis':\n \"\"\"\n Compare this `ScoreAnalysis to another `ScoreAnalysis`\n and retrieve the difference between them.\n The comparison is in the direction of `this - other`.\n\n Example: if `this` has a score of 100 and `other` has a score of 90,\n the returned score will be 10.\n If this and other were inverted, the score would have been -10.\n The same applies to all other properties of `ScoreAnalysis`.\n\n In order to properly diff `MatchAnalysis` against each other,\n we rely on the user implementing `ConstraintJustification` equality correctly.\n In other words, the diff will consider two justifications equal if the user says they are equal,\n and it expects the hash code to be consistent with equals.\n\n If one `ScoreAnalysis` provides `MatchAnalysis` and the other doesn't, exception is thrown.\n Such `ScoreAnalysis` instances are mutually incompatible.\n\n Parameters\n ----------\n other : ScoreAnalysis\n\n Returns\n -------\n ScoreExplanation\n The `ScoreAnalysis` corresponding to the diff.\n \"\"\"\n return ScoreAnalysis(self._delegate.diff(other._delegate))", "docstring": "\"\"\"\n Compare this `ScoreAnalysis to another `ScoreAnalysis`\n and retrieve the difference between them.\n The comparison is in 
the direction of `this - other`.\n\n Example: if `this` has a score of 100 and `other` has a score of 90,\n the returned score will be 10.\n If this and other were inverted, the score would have been -10.\n The same applies to all other properties of `ScoreAnalysis`.\n\n In order to properly diff `MatchAnalysis` against each other,\n we rely on the user implementing `ConstraintJustification` equality correctly.\n In other words, the diff will consider two justifications equal if the user says they are equal,\n and it expects the hash code to be consistent with equals.\n\n If one `ScoreAnalysis` provides `MatchAnalysis` and the other doesn't, exception is thrown.\n Such `ScoreAnalysis` instances are mutually incompatible.\n\n Parameters\n ----------\n other : ScoreAnalysis\n\n Returns\n -------\n ScoreExplanation\n The `ScoreAnalysis` corresponding to the diff.\n \"\"\"", "url": "https://github.com/TimefoldAI/timefold-solver/blob/f67c507a421ee113dd2e76f825480aa058b14767/python/python-core/src/main/python/score/_score_analysis.py#L631-L659", "sha": "f67c507a421ee113dd2e76f825480aa058b14767", "code/function": "def diff(self, other: 'ScoreAnalysis') -> 'ScoreAnalysis':\n \n return ScoreAnalysis(self._delegate.diff(other._delegate))"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "JaxNav.reset", "code": "@partial(jax.jit, static_argnums=[0]) \n def reset(self, key: chex.PRNGKey) -> Tuple[Dict[str, chex.Array], State]:\n \"\"\" Reset environment. Returns initial agent observations, states and the enviornment state \"\"\"\n \n state = self.sample_test_case(key)\n obs = self._get_obs(state)\n return {a: obs[i] for i, a in enumerate(self.agents)}, state", "docstring": "\"\"\" Reset environment. Returns initial agent observations, states and the enviornment state \"\"\"", "url": "https://github.com/FLAIROx/JaxMARL/blob/3dc2cf6e002b5f1b97ce3edd45aff7e1c003f8e3/jaxmarl/environments/jaxnav/jaxnav_env.py#L202-L208", "sha": "3dc2cf6e002b5f1b97ce3edd45aff7e1c003f8e3", "code/function": "@partial(jax.jit, static_argnums=[0]) \n def reset(self, key: chex.PRNGKey) -> Tuple[Dict[str, chex.Array], State]:\n \n \n state = self.sample_test_case(key)\n obs = self._get_obs(state)\n return {a: obs[i] for i, a in enumerate(self.agents)}, state"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "action_space", "code": "def action_space(\n self, agent_id: Union[int, None] = None\n ) -> spaces.Discrete:\n \"\"\"Action space of the environment.\"\"\"\n return spaces.Discrete(len(Actions))", "docstring": "\"\"\"Action space of the environment.\"\"\"", "url": "https://github.com/FLAIROx/JaxMARL/blob/3dc2cf6e002b5f1b97ce3edd45aff7e1c003f8e3/jaxmarl/environments/storm/storm_2p.py#L921-L925", "sha": "3dc2cf6e002b5f1b97ce3edd45aff7e1c003f8e3", "code/function": "def action_space(\n self, agent_id: Union[int, None] = None\n ) -> spaces.Discrete:\n \n return spaces.Discrete(len(Actions))"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "encode_teacher", "code": "def encode_teacher(self, image):\n \"\"\"encode image / videos as features.\n\n Args:\n image (torch.Tensor): The input images.\n\n Returns: tuple.\n - mask (torch.Tensor): Mask. Shape: [B,N1].\n - clip_output (torch.Tensor): The features of clip. 
Shape: [K,B,N,C].\n\n \"\"\"\n B, C, T, H, W = image.shape\n mask_type = self.image_mask_type if T == 1 else self.video_mask_type\n window_size = self.image_window_size if T == 1 else self.video_window_size\n mask_ratio = self.image_mask_ratio if T == 1 else self.video_mask_ratio\n \n if self.clip_teacher is None or self.loss_weight.uta == 0:\n return None, None\n\n if H != self.clip_img_size:\n image = torch.nn.functional.interpolate(\n image.reshape(B, C*T, H, W), \n size=(self.clip_img_size, self.clip_img_size), \n mode='bicubic', align_corners=False\n )\n image = image.view(B, C, T, self.clip_img_size, self.clip_img_size)\n\n with torch.no_grad():\n if mask_type == 'tube':\n mask = TubeMaskingGenerator(window_size, mask_ratio, B)\n clip_output, attn = self.clip_teacher(image)\n elif mask_type == 'random':\n mask = RandomMaskingGenerator(window_size, mask_ratio, B)\n clip_output, attn = self.clip_teacher(image)\n elif mask_type in 'attention':\n clip_output, attn = self.clip_teacher(image)\n BT, N = attn.shape\n N_vis = N - int(N * mask_ratio)\n importance = torch.multinomial(attn, N)\n mask = torch.ones((BT, N))\n pos1 = torch.arange(BT).view(-1, 1).repeat(1, N_vis)\n pos2 = importance[:, :N_vis]\n mask[pos1, pos2] = 0\n mask = mask.view(B, -1).to(torch.bool)\n else:\n raise NotImplementedError\n \n # mask clip output\n K, _, _, C = clip_output.shape\n mask_clip = mask.unsqueeze(0).repeat(K, 1, 1)\n clip_output = clip_output[~mask_clip].reshape(K, B, -1, C)\n \n return mask, clip_output", "docstring": "\"\"\"encode image / videos as features.\n\n Args:\n image (torch.Tensor): The input images.\n\n Returns: tuple.\n - mask (torch.Tensor): Mask. Shape: [B,N1].\n - clip_output (torch.Tensor): The features of clip. Shape: [K,B,N,C].\n\n \"\"\"", "url": "https://github.com/OpenGVLab/unmasked_teacher/blob/4fb4049f5a87919882e68ccc427615ae7dab1c33/multi_modality/models/umt.py#L117-L169", "sha": "4fb4049f5a87919882e68ccc427615ae7dab1c33", "code/function": "def encode_teacher(self, image):\n \n B, C, T, H, W = image.shape\n mask_type = self.image_mask_type if T == 1 else self.video_mask_type\n window_size = self.image_window_size if T == 1 else self.video_window_size\n mask_ratio = self.image_mask_ratio if T == 1 else self.video_mask_ratio\n \n if self.clip_teacher is None or self.loss_weight.uta == 0:\n return None, None\n\n if H != self.clip_img_size:\n image = torch.nn.functional.interpolate(\n image.reshape(B, C*T, H, W), \n size=(self.clip_img_size, self.clip_img_size), \n mode='bicubic', align_corners=False\n )\n image = image.view(B, C, T, self.clip_img_size, self.clip_img_size)\n\n with torch.no_grad():\n if mask_type == 'tube':\n mask = TubeMaskingGenerator(window_size, mask_ratio, B)\n clip_output, attn = self.clip_teacher(image)\n elif mask_type == 'random':\n mask = RandomMaskingGenerator(window_size, mask_ratio, B)\n clip_output, attn = self.clip_teacher(image)\n elif mask_type in 'attention':\n clip_output, attn = self.clip_teacher(image)\n BT, N = attn.shape\n N_vis = N - int(N * mask_ratio)\n importance = torch.multinomial(attn, N)\n mask = torch.ones((BT, N))\n pos1 = torch.arange(BT).view(-1, 1).repeat(1, N_vis)\n pos2 = importance[:, :N_vis]\n mask[pos1, pos2] = 0\n mask = mask.view(B, -1).to(torch.bool)\n else:\n raise NotImplementedError\n \n # mask clip output\n K, _, _, C = clip_output.shape\n mask_clip = mask.unsqueeze(0).repeat(K, 1, 1)\n clip_output = clip_output[~mask_clip].reshape(K, B, -1, C)\n \n return mask, clip_output"} +{"repo_name": "", "dataset": 
"github_2023", "owner": "", "lang": "", "func_name": "load_lvis_json", "code": "def load_lvis_json(json_file, image_root, dataset_name=None):\n \"\"\"\n Load a json file in LVIS's annotation format.\n\n Args:\n json_file (str): full path to the LVIS json annotation file.\n image_root (str): the directory where the images in this json file exists.\n dataset_name (str): the name of the dataset (e.g., \"lvis_v0.5_train\").\n If provided, this function will put \"thing_classes\" into the metadata\n associated with this dataset.\n\n Returns:\n list[dict]: a list of dicts in Detectron2 standard format. (See\n `Using Custom Datasets `_ )\n\n Notes:\n 1. This function does not read the image files.\n The results do not have the \"image\" field.\n \"\"\"\n from lvis import LVIS\n\n json_file = PathManager.get_local_path(json_file)\n\n timer = Timer()\n lvis_api = LVIS(json_file)\n if timer.seconds() > 1:\n logger.info(\"Loading {} takes {:.2f} seconds.\".format(json_file, timer.seconds()))\n\n if dataset_name is not None:\n meta = get_lvis_instances_meta(dataset_name)\n MetadataCatalog.get(dataset_name).set(**meta)\n\n # sort indices for reproducible results\n img_ids = sorted(lvis_api.imgs.keys())\n # imgs is a list of dicts, each looks something like:\n # {'license': 4,\n # 'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg',\n # 'file_name': 'COCO_val2014_000000001268.jpg',\n # 'height': 427,\n # 'width': 640,\n # 'date_captured': '2013-11-17 05:57:24',\n # 'id': 1268}\n imgs = lvis_api.load_imgs(img_ids)\n # anns is a list[list[dict]], where each dict is an annotation\n # record for an object. The inner list enumerates the objects in an image\n # and the outer list enumerates over images. Example of anns[0]:\n # [{'segmentation': [[192.81,\n # 247.09,\n # ...\n # 219.03,\n # 249.06]],\n # 'area': 1035.749,\n # 'image_id': 1268,\n # 'bbox': [192.81, 224.8, 74.73, 33.43],\n # 'category_id': 16,\n # 'id': 42986},\n # ...]\n anns = [lvis_api.img_ann_map[img_id] for img_id in img_ids]\n\n # Sanity check that each annotation has a unique id\n ann_ids = [ann[\"id\"] for anns_per_image in anns for ann in anns_per_image]\n assert len(set(ann_ids)) == len(ann_ids), \"Annotation ids in '{}' are not unique\".format(\n json_file\n )\n\n imgs_anns = list(zip(imgs, anns))\n\n logger.info(\"Loaded {} images in the LVIS format from {}\".format(len(imgs_anns), json_file))\n\n def get_file_name(img_root, img_dict):\n # Determine the path including the split folder (\"train2017\", \"val2017\", \"test2017\") from\n # the coco_url field. 
Example:\n # 'coco_url': 'http://images.cocodataset.org/train2017/000000155379.jpg'\n split_folder, file_name = img_dict[\"coco_url\"].split(\"/\")[-2:]\n return os.path.join(img_root + split_folder, file_name)\n\n dataset_dicts = []\n\n for (img_dict, anno_dict_list) in imgs_anns:\n record = {}\n record[\"file_name\"] = get_file_name(image_root, img_dict)\n record[\"height\"] = img_dict[\"height\"]\n record[\"width\"] = img_dict[\"width\"]\n record[\"not_exhaustive_category_ids\"] = img_dict.get(\"not_exhaustive_category_ids\", [])\n record[\"neg_category_ids\"] = img_dict.get(\"neg_category_ids\", [])\n image_id = record[\"image_id\"] = img_dict[\"id\"]\n\n objs = []\n for anno in anno_dict_list:\n # Check that the image_id in this annotation is the same as\n # the image_id we're looking at.\n # This fails only when the data parsing logic or the annotation file is buggy.\n assert anno[\"image_id\"] == image_id\n obj = {\"bbox\": anno[\"bbox\"], \"bbox_mode\": BoxMode.XYWH_ABS}\n obj[\"category_id\"] = anno[\"category_id\"] - 1 # Convert 1-indexed to 0-indexed\n segm = anno[\"segmentation\"] # list[list[float]]\n # filter out invalid polygons (< 3 points)\n valid_segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6]\n assert len(segm) == len(\n valid_segm\n ), \"Annotation contains an invalid polygon with < 3 points\"\n assert len(segm) > 0\n obj[\"segmentation\"] = segm\n objs.append(obj)\n record[\"annotations\"] = objs\n dataset_dicts.append(record)\n\n return dataset_dicts", "docstring": "\"\"\"\n Load a json file in LVIS's annotation format.\n\n Args:\n json_file (str): full path to the LVIS json annotation file.\n image_root (str): the directory where the images in this json file exists.\n dataset_name (str): the name of the dataset (e.g., \"lvis_v0.5_train\").\n If provided, this function will put \"thing_classes\" into the metadata\n associated with this dataset.\n\n Returns:\n list[dict]: a list of dicts in Detectron2 standard format. (See\n `Using Custom Datasets `_ )\n\n Notes:\n 1. This function does not read the image files.\n The results do not have the \"image\" field.\n \"\"\"", "url": "https://github.com/hujiecpp/YOSO/blob/04b898d395ffd8318aa3761b0b2b6d20b3514f26/detectron2/data/datasets/lvis.py#L40-L147", "sha": "04b898d395ffd8318aa3761b0b2b6d20b3514f26", "code/function": "def load_lvis_json(json_file, image_root, dataset_name=None):\n \n from lvis import LVIS\n\n json_file = PathManager.get_local_path(json_file)\n\n timer = Timer()\n lvis_api = LVIS(json_file)\n if timer.seconds() > 1:\n logger.info(\"Loading {} takes {:.2f} seconds.\".format(json_file, timer.seconds()))\n\n if dataset_name is not None:\n meta = get_lvis_instances_meta(dataset_name)\n MetadataCatalog.get(dataset_name).set(**meta)\n\n # sort indices for reproducible results\n img_ids = sorted(lvis_api.imgs.keys())\n # imgs is a list of dicts, each looks something like:\n # {'license': 4,\n # 'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg',\n # 'file_name': 'COCO_val2014_000000001268.jpg',\n # 'height': 427,\n # 'width': 640,\n # 'date_captured': '2013-11-17 05:57:24',\n # 'id': 1268}\n imgs = lvis_api.load_imgs(img_ids)\n # anns is a list[list[dict]], where each dict is an annotation\n # record for an object. The inner list enumerates the objects in an image\n # and the outer list enumerates over images. 
Example of anns[0]:\n # [{'segmentation': [[192.81,\n # 247.09,\n # ...\n # 219.03,\n # 249.06]],\n # 'area': 1035.749,\n # 'image_id': 1268,\n # 'bbox': [192.81, 224.8, 74.73, 33.43],\n # 'category_id': 16,\n # 'id': 42986},\n # ...]\n anns = [lvis_api.img_ann_map[img_id] for img_id in img_ids]\n\n # Sanity check that each annotation has a unique id\n ann_ids = [ann[\"id\"] for anns_per_image in anns for ann in anns_per_image]\n assert len(set(ann_ids)) == len(ann_ids), \"Annotation ids in '{}' are not unique\".format(\n json_file\n )\n\n imgs_anns = list(zip(imgs, anns))\n\n logger.info(\"Loaded {} images in the LVIS format from {}\".format(len(imgs_anns), json_file))\n\n def get_file_name(img_root, img_dict):\n # Determine the path including the split folder (\"train2017\", \"val2017\", \"test2017\") from\n # the coco_url field. Example:\n # 'coco_url': 'http://images.cocodataset.org/train2017/000000155379.jpg'\n split_folder, file_name = img_dict[\"coco_url\"].split(\"/\")[-2:]\n return os.path.join(img_root + split_folder, file_name)\n\n dataset_dicts = []\n\n for (img_dict, anno_dict_list) in imgs_anns:\n record = {}\n record[\"file_name\"] = get_file_name(image_root, img_dict)\n record[\"height\"] = img_dict[\"height\"]\n record[\"width\"] = img_dict[\"width\"]\n record[\"not_exhaustive_category_ids\"] = img_dict.get(\"not_exhaustive_category_ids\", [])\n record[\"neg_category_ids\"] = img_dict.get(\"neg_category_ids\", [])\n image_id = record[\"image_id\"] = img_dict[\"id\"]\n\n objs = []\n for anno in anno_dict_list:\n # Check that the image_id in this annotation is the same as\n # the image_id we're looking at.\n # This fails only when the data parsing logic or the annotation file is buggy.\n assert anno[\"image_id\"] == image_id\n obj = {\"bbox\": anno[\"bbox\"], \"bbox_mode\": BoxMode.XYWH_ABS}\n obj[\"category_id\"] = anno[\"category_id\"] - 1 # Convert 1-indexed to 0-indexed\n segm = anno[\"segmentation\"] # list[list[float]]\n # filter out invalid polygons (< 3 points)\n valid_segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6]\n assert len(segm) == len(\n valid_segm\n ), \"Annotation contains an invalid polygon with < 3 points\"\n assert len(segm) > 0\n obj[\"segmentation\"] = segm\n objs.append(obj)\n record[\"annotations\"] = objs\n dataset_dicts.append(record)\n\n return dataset_dicts"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__call__", "code": "def __call__(self, dataset_dict):\n \"\"\"\n Args:\n dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format.\n\n Returns:\n dict: a format that builtin models in detectron2 accept\n \"\"\"\n # assert self.is_train, \"MaskFormerSemanticDatasetMapper should only be used for training!\"\n\n dataset_dict = copy.deepcopy(dataset_dict) # it will be modified by code below\n image = utils.read_image(dataset_dict[\"file_name\"], format=self.img_format)\n utils.check_image_size(dataset_dict, image)\n\n if \"sem_seg_file_name\" in dataset_dict:\n # PyTorch transformation not implemented for uint16, so converting it to double first\n sem_seg_gt = utils.read_image(dataset_dict.pop(\"sem_seg_file_name\")).astype(\n \"double\"\n )\n else:\n sem_seg_gt = None\n\n if sem_seg_gt is None:\n raise ValueError(\n \"Cannot find 'sem_seg_file_name' for semantic segmentation dataset {}.\".format(\n dataset_dict[\"file_name\"]\n )\n )\n\n aug_input = T.AugInput(image, sem_seg=sem_seg_gt)\n aug_input.category_id = dataset_dict[\"category_id\"]\n aug_input, transforms = 
T.apply_transform_gens(self.tfm_gens, aug_input)\n image = aug_input.image\n sem_seg_gt = aug_input.sem_seg\n\n # Pad image and segmentation label here!\n image = torch.as_tensor(np.ascontiguousarray(image.transpose(2, 0, 1)))\n if sem_seg_gt is not None:\n sem_seg_gt = torch.as_tensor(sem_seg_gt.astype(\"long\"))\n\n if self.size_divisibility > 0:\n image_size = (image.shape[-2], image.shape[-1])\n padding_size = [\n 0,\n self.size_divisibility - image_size[1],\n 0,\n self.size_divisibility - image_size[0],\n ]\n image = F.pad(image, padding_size, value=128).contiguous()\n if sem_seg_gt is not None:\n sem_seg_gt = F.pad(\n sem_seg_gt, padding_size, value=self.ignore_label\n ).contiguous()\n\n image_shape = (image.shape[-2], image.shape[-1]) # h, w\n\n # Pytorch's dataloader is efficient on torch.Tensor due to shared-memory,\n # but not efficient on large generic data structures due to the use of pickle & mp.Queue.\n # Therefore it's important to use torch.Tensor.\n dataset_dict[\"image\"] = image\n\n if sem_seg_gt is not None:\n dataset_dict[\"sem_seg\"] = sem_seg_gt.long()\n\n if \"annotations\" in dataset_dict:\n raise ValueError(\n \"Semantic segmentation dataset should not have 'annotations'.\"\n )\n\n # Prepare per-category binary masks\n if sem_seg_gt is not None:\n sem_seg_gt = sem_seg_gt.numpy()\n instances = Instances(image_shape)\n instances.gt_classes = torch.tensor(\n [dataset_dict[\"category_id\"]], dtype=torch.int64\n )\n\n masks = []\n masks.append(sem_seg_gt == dataset_dict[\"category_id\"])\n if masks[0].sum() == 0:\n return None\n if len(masks) == 0:\n # Some image does not have annotation (all ignored)\n instances.gt_masks = torch.zeros(\n (0, sem_seg_gt.shape[-2], sem_seg_gt.shape[-1])\n )\n else:\n masks = BitMasks(\n torch.stack(\n [\n torch.from_numpy(np.ascontiguousarray(x.copy()))\n for x in masks\n ]\n )\n )\n instances.gt_masks = masks.tensor\n\n dataset_dict[\"instances\"] = instances\n\n return dataset_dict", "docstring": "\"\"\"\n Args:\n dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format.\n\n Returns:\n dict: a format that builtin models in detectron2 accept\n \"\"\"", "url": "https://github.com/bytedance/FreeSeg/blob/7707335cc3f2a1a73d4d2829f3cdbb0e031d3961/mask2former/data/dataset_mappers/mask_former_binary_semantic_dataset_mapper.py#L95-L194", "sha": "7707335cc3f2a1a73d4d2829f3cdbb0e031d3961", "code/function": "def __call__(self, dataset_dict):\n \n # assert self.is_train, \"MaskFormerSemanticDatasetMapper should only be used for training!\"\n\n dataset_dict = copy.deepcopy(dataset_dict) # it will be modified by code below\n image = utils.read_image(dataset_dict[\"file_name\"], format=self.img_format)\n utils.check_image_size(dataset_dict, image)\n\n if \"sem_seg_file_name\" in dataset_dict:\n # PyTorch transformation not implemented for uint16, so converting it to double first\n sem_seg_gt = utils.read_image(dataset_dict.pop(\"sem_seg_file_name\")).astype(\n \"double\"\n )\n else:\n sem_seg_gt = None\n\n if sem_seg_gt is None:\n raise ValueError(\n \"Cannot find 'sem_seg_file_name' for semantic segmentation dataset {}.\".format(\n dataset_dict[\"file_name\"]\n )\n )\n\n aug_input = T.AugInput(image, sem_seg=sem_seg_gt)\n aug_input.category_id = dataset_dict[\"category_id\"]\n aug_input, transforms = T.apply_transform_gens(self.tfm_gens, aug_input)\n image = aug_input.image\n sem_seg_gt = aug_input.sem_seg\n\n # Pad image and segmentation label here!\n image = torch.as_tensor(np.ascontiguousarray(image.transpose(2, 0, 1)))\n if 
sem_seg_gt is not None:\n sem_seg_gt = torch.as_tensor(sem_seg_gt.astype(\"long\"))\n\n if self.size_divisibility > 0:\n image_size = (image.shape[-2], image.shape[-1])\n padding_size = [\n 0,\n self.size_divisibility - image_size[1],\n 0,\n self.size_divisibility - image_size[0],\n ]\n image = F.pad(image, padding_size, value=128).contiguous()\n if sem_seg_gt is not None:\n sem_seg_gt = F.pad(\n sem_seg_gt, padding_size, value=self.ignore_label\n ).contiguous()\n\n image_shape = (image.shape[-2], image.shape[-1]) # h, w\n\n # Pytorch's dataloader is efficient on torch.Tensor due to shared-memory,\n # but not efficient on large generic data structures due to the use of pickle & mp.Queue.\n # Therefore it's important to use torch.Tensor.\n dataset_dict[\"image\"] = image\n\n if sem_seg_gt is not None:\n dataset_dict[\"sem_seg\"] = sem_seg_gt.long()\n\n if \"annotations\" in dataset_dict:\n raise ValueError(\n \"Semantic segmentation dataset should not have 'annotations'.\"\n )\n\n # Prepare per-category binary masks\n if sem_seg_gt is not None:\n sem_seg_gt = sem_seg_gt.numpy()\n instances = Instances(image_shape)\n instances.gt_classes = torch.tensor(\n [dataset_dict[\"category_id\"]], dtype=torch.int64\n )\n\n masks = []\n masks.append(sem_seg_gt == dataset_dict[\"category_id\"])\n if masks[0].sum() == 0:\n return None\n if len(masks) == 0:\n # Some image does not have annotation (all ignored)\n instances.gt_masks = torch.zeros(\n (0, sem_seg_gt.shape[-2], sem_seg_gt.shape[-1])\n )\n else:\n masks = BitMasks(\n torch.stack(\n [\n torch.from_numpy(np.ascontiguousarray(x.copy()))\n for x in masks\n ]\n )\n )\n instances.gt_masks = masks.tensor\n\n dataset_dict[\"instances\"] = instances\n\n return dataset_dict"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "append_form", "code": "def append_form(\n self,\n obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],\n headers: Optional[MultiMapping[str]] = None,\n ) -> Payload:\n \"\"\"Helper to append form urlencoded part.\"\"\"\n assert isinstance(obj, (Sequence, Mapping))\n\n if headers is None:\n headers = CIMultiDict()\n\n if isinstance(obj, Mapping):\n obj = list(obj.items())\n data = urlencode(obj, doseq=True)\n\n return self.append_payload(\n StringPayload(\n data, headers=headers, content_type=\"application/x-www-form-urlencoded\"\n )\n )", "docstring": "\"\"\"Helper to append form urlencoded part.\"\"\"", "url": "https://github.com/TREE-Ind/Blender-GPT/blob/cf8b62ebd327940ec7d1d47cc7baa7fc595ddd44/lib/aiohttp/multipart.py#L845-L864", "sha": "cf8b62ebd327940ec7d1d47cc7baa7fc595ddd44", "code/function": "def append_form(\n self,\n obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],\n headers: Optional[MultiMapping[str]] = None,\n ) -> Payload:\n \n assert isinstance(obj, (Sequence, Mapping))\n\n if headers is None:\n headers = CIMultiDict()\n\n if isinstance(obj, Mapping):\n obj = list(obj.items())\n data = urlencode(obj, doseq=True)\n\n return self.append_payload(\n StringPayload(\n data, headers=headers, content_type=\"application/x-www-form-urlencoded\"\n )\n )"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "generate", "code": "def generate(self: CAUSAL_GPT_TYPES, *args: tuple, **kwargs: Dict[str, Any]):\n \"\"\"Wraps original generate to enable PrefixLM attention.\"\"\"\n attn_modules = _get_attn_modules(model)\n for attn_module in attn_modules:\n attn_module.bias.data[:] = 1\n output = self._original_generate(*args, **kwargs)\n for attn_module in 
attn_modules:\n attn_module.bias.data = torch.tril(attn_module.bias.data[0, 0])[None, None]\n return output", "docstring": "\"\"\"Wraps original generate to enable PrefixLM attention.\"\"\"", "url": "https://github.com/EvolvingLMMs-Lab/Otter/blob/1e7eb9a6fb12ef410082e796c463b99495637b85/src/otter_ai/models/mpt/hf_prefixlm_converter.py#L161-L169", "sha": "1e7eb9a6fb12ef410082e796c463b99495637b85", "code/function": "def generate(self: CAUSAL_GPT_TYPES, *args: tuple, **kwargs: Dict[str, Any]):\n \n attn_modules = _get_attn_modules(model)\n for attn_module in attn_modules:\n attn_module.bias.data[:] = 1\n output = self._original_generate(*args, **kwargs)\n for attn_module in attn_modules:\n attn_module.bias.data = torch.tril(attn_module.bias.data[0, 0])[None, None]\n return output"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "forward", "code": "def forward(self, x: torch.Tensor, x_pos: torch.Tensor):\n \"\"\"\n Args:\n x (:obj:`torch.Tensor` of shape ``(..., dim)``): Inputs.\n x_pos (:obj:`torch.Tensor` of shape ``(...)``): Positions of inputs.\n \"\"\"\n x_pos = x_pos * self.distance_scale\n freqs = x_pos[..., None].to(self.dtype) * self.inv_freq[None, :] # (..., dim/2)\n\n # the same implementation as sat\n emb = torch.cat((freqs, freqs), dim=-1) # (..., dim)\n emb_cos = emb.cos() # (..., dim)\n emb_sin = emb.sin() # (..., dim)\n\n rotate_x = torch.cat([-x[..., x.size(-1) // 2 :], x[..., : x.size(-1) // 2]], dim=-1) # (..., dim)\n\n return x * emb_cos + rotate_x * emb_sin", "docstring": "\"\"\"\n Args:\n x (:obj:`torch.Tensor` of shape ``(..., dim)``): Inputs.\n x_pos (:obj:`torch.Tensor` of shape ``(...)``): Positions of inputs.\n \"\"\"", "url": "https://github.com/TsinghuaDatabaseGroup/DB-GPT/blob/0ced623935ae23b390bf7a4bb4de7fb26bbc777a/multiagents/localized_llms/cpm/layers/position_embedding.py#L218-L234", "sha": "0ced623935ae23b390bf7a4bb4de7fb26bbc777a", "code/function": "def forward(self, x: torch.Tensor, x_pos: torch.Tensor):\n \n x_pos = x_pos * self.distance_scale\n freqs = x_pos[..., None].to(self.dtype) * self.inv_freq[None, :] # (..., dim/2)\n\n # the same implementation as sat\n emb = torch.cat((freqs, freqs), dim=-1) # (..., dim)\n emb_cos = emb.cos() # (..., dim)\n emb_sin = emb.sin() # (..., dim)\n\n rotate_x = torch.cat([-x[..., x.size(-1) // 2 :], x[..., : x.size(-1) // 2]], dim=-1) # (..., dim)\n\n return x * emb_cos + rotate_x * emb_sin"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__repr__", "code": "def __repr__(self):\n \"\"\"str: a string that describes the module\"\"\"\n repr_str = self.__class__.__name__\n repr_str += f'(in_channels={self.in_channels}, '\n repr_str += f'feat_channels={self.feat_channels}, '\n repr_str += f'out_channels={self.out_channels_raw}, '\n repr_str += f'input_feat_shape={self.input_feat_shape}, '\n repr_str += f'act_cfg={self.act_cfg}, '\n repr_str += f'norm_cfg={self.norm_cfg})'\n return repr_str", "docstring": "\"\"\"str: a string that describes the module\"\"\"", "url": "https://github.com/hht1996ok/EA-LSS/blob/193c30141da8625f442d10f0fa29c226694bc3c3/mmdetection-2.11.0/mmdet/models/utils/transformer.py#L851-L860", "sha": "193c30141da8625f442d10f0fa29c226694bc3c3", "code/function": "def __repr__(self):\n \n repr_str = self.__class__.__name__\n repr_str += f'(in_channels={self.in_channels}, '\n repr_str += f'feat_channels={self.feat_channels}, '\n repr_str += f'out_channels={self.out_channels_raw}, '\n repr_str += f'input_feat_shape={self.input_feat_shape}, 
'\n repr_str += f'act_cfg={self.act_cfg}, '\n repr_str += f'norm_cfg={self.norm_cfg})'\n return repr_str"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_bbox_forward", "code": "def _bbox_forward(self, x, rois):\n \"\"\"Box head forward function used in both training and testing time\"\"\"\n bbox_cls_feats = self.bbox_roi_extractor(\n x[:self.bbox_roi_extractor.num_inputs], rois)\n bbox_reg_feats = self.bbox_roi_extractor(\n x[:self.bbox_roi_extractor.num_inputs],\n rois,\n roi_scale_factor=self.reg_roi_scale_factor)\n if self.with_shared_head:\n bbox_cls_feats = self.shared_head(bbox_cls_feats)\n bbox_reg_feats = self.shared_head(bbox_reg_feats)\n cls_score, bbox_pred = self.bbox_head(bbox_cls_feats, bbox_reg_feats)\n\n bbox_results = dict(\n cls_score=cls_score,\n bbox_pred=bbox_pred,\n bbox_feats=bbox_cls_feats)\n return bbox_results", "docstring": "\"\"\"Box head forward function used in both training and testing time\"\"\"", "url": "https://github.com/ViTAE-Transformer/RSP/blob/f29818739165215d341af2ef8c20f9e2daecf128/Object Detection/mmdet/models/roi_heads/double_roi_head.py#L16-L33", "sha": "f29818739165215d341af2ef8c20f9e2daecf128", "code/function": "def _bbox_forward(self, x, rois):\n \n bbox_cls_feats = self.bbox_roi_extractor(\n x[:self.bbox_roi_extractor.num_inputs], rois)\n bbox_reg_feats = self.bbox_roi_extractor(\n x[:self.bbox_roi_extractor.num_inputs],\n rois,\n roi_scale_factor=self.reg_roi_scale_factor)\n if self.with_shared_head:\n bbox_cls_feats = self.shared_head(bbox_cls_feats)\n bbox_reg_feats = self.shared_head(bbox_reg_feats)\n cls_score, bbox_pred = self.bbox_head(bbox_cls_feats, bbox_reg_feats)\n\n bbox_results = dict(\n cls_score=cls_score,\n bbox_pred=bbox_pred,\n bbox_feats=bbox_cls_feats)\n return bbox_results"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "add", "code": "def add(self, *args, action_masks: Optional[np.ndarray] = None, **kwargs) -> None:\n \"\"\"\n :param action_masks: Masks applied to constrain the choice of possible actions.\n \"\"\"\n if action_masks is not None:\n self.action_masks[self.pos] = action_masks.reshape(\n (self.n_envs, self.mask_dims))\n\n super().add(*args, **kwargs)", "docstring": "\"\"\"\n :param action_masks: Masks applied to constrain the choice of possible actions.\n \"\"\"", "url": "https://github.com/Leezekun/Directional-Stimulus-Prompting/blob/93b44f9e74d608732fd7809e664cdc6c9f1f769b/rl4lms/algorithms/common/maskable/buffers.py#L72-L80", "sha": "93b44f9e74d608732fd7809e664cdc6c9f1f769b", "code/function": "def add(self, *args, action_masks: Optional[np.ndarray] = None, **kwargs) -> None:\n \n if action_masks is not None:\n self.action_masks[self.pos] = action_masks.reshape(\n (self.n_envs, self.mask_dims))\n\n super().add(*args, **kwargs)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_ngram_counts", "code": "def _ngram_counts(sequence, order):\n \"\"\"Returns count of all ngrams of given order in sequence.\"\"\"\n if len(sequence) < order:\n return collections.Counter()\n return collections.Counter(_ngrams(sequence, order))", "docstring": "\"\"\"Returns count of all ngrams of given order in sequence.\"\"\"", "url": "https://github.com/Leezekun/Directional-Stimulus-Prompting/blob/93b44f9e74d608732fd7809e664cdc6c9f1f769b/rl4lms/data_pools/task_utils/totto/eval_utils/totto_parent_eval.py#L285-L289", "sha": "93b44f9e74d608732fd7809e664cdc6c9f1f769b", "code/function": "def _ngram_counts(sequence, order):\n \n 
if len(sequence) < order:\n return collections.Counter()\n return collections.Counter(_ngrams(sequence, order))"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "apply_image", "code": "def apply_image(self, image: np.ndarray) -> np.ndarray:\n \"\"\"\n Expects a numpy array with shape HxWxC in uint8 format.\n \"\"\"\n target_size = self.get_preprocess_shape(image.shape[0], image.shape[1], self.target_length)\n return np.array(resize(to_pil_image(image), target_size))", "docstring": "\"\"\"\n Expects a numpy array with shape HxWxC in uint8 format.\n \"\"\"", "url": "https://github.com/IDEA-Research/Grounded-Segment-Anything/blob/126abe633ffe333e16e4a0a4e946bc1003caf757/segment_anything/segment_anything/utils/transforms.py#L26-L31", "sha": "126abe633ffe333e16e4a0a4e946bc1003caf757", "code/function": "def apply_image(self, image: np.ndarray) -> np.ndarray:\n \n target_size = self.get_preprocess_shape(image.shape[0], image.shape[1], self.target_length)\n return np.array(resize(to_pil_image(image), target_size))"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "compute_sigma_t", "code": "def compute_sigma_t(self, t):\n \"\"\"Compute coefficient of x0\"\"\"\n p_sigma_t = 2 * self.log_mean_coeff(t)\n sigma_t = th.sqrt(1 - th.exp(p_sigma_t))\n d_sigma_t = th.exp(p_sigma_t) * (2 * self.d_log_mean_coeff(t)) / (-2 * sigma_t)\n return sigma_t, d_sigma_t", "docstring": "\"\"\"Compute coefficient of x0\"\"\"", "url": "https://github.com/mit-han-lab/efficientvit/blob/b94ff779828eea399c78f626b574da2d50ef2e49/efficientvit/diffusioncore/models/sit_sampler/path.py#L162-L167", "sha": "b94ff779828eea399c78f626b574da2d50ef2e49", "code/function": "def compute_sigma_t(self, t):\n \n p_sigma_t = 2 * self.log_mean_coeff(t)\n sigma_t = th.sqrt(1 - th.exp(p_sigma_t))\n d_sigma_t = th.exp(p_sigma_t) * (2 * self.d_log_mean_coeff(t)) / (-2 * sigma_t)\n return sigma_t, d_sigma_t"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "create_video_note", "code": "def create_video_note(\n self,\n title,\n video_path: str,\n desc: str,\n cover_path: str = None,\n ats: list = None,\n post_time: str = None,\n topics: list = None,\n is_private: bool = False,\n wait_time: int = 3,\n ):\n \"\"\"发布视频笔记\n\n :param title: 笔记标题\n :param video_path: 视频文件路径,目前只支持本地路径\n :param desc: 笔记详情\n :param cover_path: 可选,封面文件路径\n :param ats: 可选,@用户信息\n :param post_time: 可选,发布时间\n :param topics: 可选,话题信息\n :param is_private: 可选,是否私密发布\n :param wait_time: 可选,默认 3 s,循环等待获取视频第一帧为笔记封面\n :return:\n :rtype: object\n \"\"\"\n if ats is None:\n ats = []\n if topics is None:\n topics = []\n\n file_id, token = self.get_upload_files_permit(\"video\")\n res = self.upload_file(\n file_id,\n token,\n video_path,\n content_type=\"video/mp4\",\n )\n video_id, is_upload = res.headers[\"X-Ros-Video-Id\"], False\n\n image_id = None\n if cover_path is None:\n for _ in range(10):\n time.sleep(wait_time)\n image_id = self.get_video_first_frame_image_id(video_id)\n if image_id:\n break\n\n if cover_path:\n is_upload = True\n image_id, token = self.get_upload_files_permit(\"image\")\n self.upload_file(image_id, token, cover_path)\n\n cover_info = {\n \"file_id\": image_id,\n \"frame\": {\"ts\": 0, \"is_user_select\": False, \"is_upload\": is_upload},\n }\n\n video_info = {\n \"file_id\": file_id,\n \"timelines\": [],\n \"cover\": cover_info,\n \"chapters\": [],\n \"chapter_sync_text\": False,\n \"entrance\": \"web\",\n }\n return self.create_note(title, desc, 
NoteType.VIDEO.value, ats=ats, topics=topics, video_info=video_info,\n post_time=post_time, is_private=is_private)", "docstring": "\"\"\"发布视频笔记\n\n :param title: 笔记标题\n :param video_path: 视频文件路径,目前只支持本地路径\n :param desc: 笔记详情\n :param cover_path: 可选,封面文件路径\n :param ats: 可选,@用户信息\n :param post_time: 可选,发布时间\n :param topics: 可选,话题信息\n :param is_private: 可选,是否私密发布\n :param wait_time: 可选,默认 3 s,循环等待获取视频第一帧为笔记封面\n :return:\n :rtype: object\n \"\"\"", "url": "https://github.com/ReaJason/xhs/blob/613036c431f1f8b68d6d4e8125e20629679bee41/xhs/core.py#L1013-L1080", "sha": "613036c431f1f8b68d6d4e8125e20629679bee41", "code/function": "def create_video_note(\n self,\n title,\n video_path: str,\n desc: str,\n cover_path: str = None,\n ats: list = None,\n post_time: str = None,\n topics: list = None,\n is_private: bool = False,\n wait_time: int = 3,\n ):\n \n if ats is None:\n ats = []\n if topics is None:\n topics = []\n\n file_id, token = self.get_upload_files_permit(\"video\")\n res = self.upload_file(\n file_id,\n token,\n video_path,\n content_type=\"video/mp4\",\n )\n video_id, is_upload = res.headers[\"X-Ros-Video-Id\"], False\n\n image_id = None\n if cover_path is None:\n for _ in range(10):\n time.sleep(wait_time)\n image_id = self.get_video_first_frame_image_id(video_id)\n if image_id:\n break\n\n if cover_path:\n is_upload = True\n image_id, token = self.get_upload_files_permit(\"image\")\n self.upload_file(image_id, token, cover_path)\n\n cover_info = {\n \"file_id\": image_id,\n \"frame\": {\"ts\": 0, \"is_user_select\": False, \"is_upload\": is_upload},\n }\n\n video_info = {\n \"file_id\": file_id,\n \"timelines\": [],\n \"cover\": cover_info,\n \"chapters\": [],\n \"chapter_sync_text\": False,\n \"entrance\": \"web\",\n }\n return self.create_note(title, desc, NoteType.VIDEO.value, ats=ats, topics=topics, video_info=video_info,\n post_time=post_time, is_private=is_private)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BaseImageEncoder.name", "code": "@abstractproperty\n def name(self) -> str:\n \"\"\"\n returns the name of the encoder\n \"\"\"", "docstring": "\"\"\"\n returns the name of the encoder\n \"\"\"", "url": "https://github.com/kerrj/lerf/blob/db08d578038d884542688511bd9ad7b489a65673/lerf/encoders/image_encoder.py#L16-L20", "sha": "db08d578038d884542688511bd9ad7b489a65673", "code/function": "@abstractproperty\n def name(self) -> str:"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__init__", "code": "def __init__(self, equ_h: int, equ_w: int):\n \"\"\"Args:\n equ_h: (int) the height of the generated equirect\n equ_w: (int) the width of the generated equirect\n \"\"\"\n\n # Cubemap input\n input_projections = get_cubemap_projections(equ_h,equ_h)\n\n # Equirectangular output\n output_projection = EquirectProjection(equ_h, equ_w)\n super(Cube2Equirect, self).__init__(\n input_projections, output_projection\n )", "docstring": "\"\"\"Args:\n equ_h: (int) the height of the generated equirect\n equ_w: (int) the width of the generated equirect\n \"\"\"", "url": "https://github.com/MarSaKi/ETPNav/blob/8dec13a4e24f8bc671a3269bbcf3238793607621/habitat_extensions/obs_transformers.py#L197-L210", "sha": "8dec13a4e24f8bc671a3269bbcf3238793607621", "code/function": "def __init__(self, equ_h: int, equ_w: int):\n \n\n # Cubemap input\n input_projections = get_cubemap_projections(equ_h,equ_h)\n\n # Equirectangular output\n output_projection = EquirectProjection(equ_h, equ_w)\n super(Cube2Equirect, self).__init__(\n 
input_projections, output_projection\n )"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_coor_colors", "code": "def get_coor_colors(obj_labels):\n \"\"\"\n Args:\n obj_labels: 1 is ground, labels > 1 indicates different instance cluster\n\n Returns:\n rgb: [N, 3]. color for each point.\n \"\"\"\n colors = matplotlib.colors.XKCD_COLORS.values()\n max_color_num = obj_labels.max()\n\n color_list = list(colors)[:max_color_num+1]\n colors_rgba = [matplotlib.colors.to_rgba_array(color) for color in color_list]\n label_rgba = np.array(colors_rgba)[obj_labels]\n label_rgba = label_rgba.squeeze()[:, :3]\n\n return label_rgba", "docstring": "\"\"\"\n Args:\n obj_labels: 1 is ground, labels > 1 indicates different instance cluster\n\n Returns:\n rgb: [N, 3]. color for each point.\n \"\"\"", "url": "https://github.com/darrenjkt/MS3D/blob/ffed761a6846183966cc38d3144c90d91446fa4a/tools/visual_utils/open3d_vis_utils.py#L30-L46", "sha": "ffed761a6846183966cc38d3144c90d91446fa4a", "code/function": "def get_coor_colors(obj_labels):\n \n colors = matplotlib.colors.XKCD_COLORS.values()\n max_color_num = obj_labels.max()\n\n color_list = list(colors)[:max_color_num+1]\n colors_rgba = [matplotlib.colors.to_rgba_array(color) for color in color_list]\n label_rgba = np.array(colors_rgba)[obj_labels]\n label_rgba = label_rgba.squeeze()[:, :3]\n\n return label_rgba"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "apply_coords", "code": "def apply_coords(self, coords: np.ndarray, original_size: Tuple[int, ...]) -> np.ndarray:\n \"\"\"\n Expects a numpy array of length 2 in the final dimension. Requires the\n original image size in (H, W) format.\n \"\"\"\n old_h, old_w = original_size\n new_h, new_w = self.get_preprocess_shape(\n original_size[0], original_size[1], self.target_length\n )\n coords = deepcopy(coords).astype(float)\n coords[..., 0] = coords[..., 0] * (new_w / old_w)\n coords[..., 1] = coords[..., 1] * (new_h / old_h)\n return coords", "docstring": "\"\"\"\n Expects a numpy array of length 2 in the final dimension. 
Requires the\n original image size in (H, W) format.\n \"\"\"", "url": "https://github.com/ngthanhtin/owlvit_segment_anything/blob/2deca3a5d9760e6863e088db4d9a46912c6d83b9/segment_anything/segment_anything/utils/transforms.py#L33-L45", "sha": "2deca3a5d9760e6863e088db4d9a46912c6d83b9", "code/function": "def apply_coords(self, coords: np.ndarray, original_size: Tuple[int, ...]) -> np.ndarray:\n \n old_h, old_w = original_size\n new_h, new_w = self.get_preprocess_shape(\n original_size[0], original_size[1], self.target_length\n )\n coords = deepcopy(coords).astype(float)\n coords[..., 0] = coords[..., 0] * (new_w / old_w)\n coords[..., 1] = coords[..., 1] * (new_h / old_h)\n return coords"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "f_train_wrapper", "code": "def f_train_wrapper(args, pt_model, loss_wrapper, device, \\\n optimizer_wrapper, \\\n train_dataset_wrapper, \\\n val_dataset_wrapper = None, \\\n checkpoint = None):\n \"\"\" \n f_train_wrapper(args, pt_model, loss_wrapper, device, \n optimizer_wrapper\n train_dataset_wrapper, val_dataset_wrapper = None,\n check_point = None):\n A wrapper to run the training process\n\n Args:\n args: argument information given by argpase\n pt_model: pytorch model (torch.nn.Module)\n loss_wrapper: a wrapper over loss function\n loss_wrapper.compute(generated, target) \n device: torch.device(\"cuda\") or torch.device(\"cpu\")\n\n optimizer_wrapper: \n a wrapper over optimizer (defined in op_manager.py)\n optimizer_wrapper.optimizer is torch.optimizer\n \n train_dataset_wrapper: \n a wrapper over training data set (data_io/default_data_io.py)\n train_dataset_wrapper.get_loader() returns torch.DataSetLoader\n \n val_dataset_wrapper: \n a wrapper over validation data set (data_io/default_data_io.py)\n it can None.\n \n check_point:\n a check_point that stores every thing to resume training\n \"\"\" \n \n nii_display.f_print_w_date(\"Start model training\")\n\n ##############\n ## Preparation\n ##############\n\n # get the optimizer\n optimizer_wrapper.print_info()\n optimizer = optimizer_wrapper.optimizer\n lr_scheduler = optimizer_wrapper.lr_scheduler\n epoch_num = optimizer_wrapper.get_epoch_num()\n no_best_epoch_num = optimizer_wrapper.get_no_best_epoch_num()\n \n # get data loader for training set\n train_dataset_wrapper.print_info()\n train_data_loader = train_dataset_wrapper.get_loader()\n train_seq_num = train_dataset_wrapper.get_seq_num()\n\n # get the training process monitor\n monitor_trn = nii_monitor.Monitor(epoch_num, train_seq_num)\n\n # if validation data is provided, get data loader for val set\n if val_dataset_wrapper is not None:\n val_dataset_wrapper.print_info()\n val_data_loader = val_dataset_wrapper.get_loader()\n val_seq_num = val_dataset_wrapper.get_seq_num()\n monitor_val = nii_monitor.Monitor(epoch_num, val_seq_num)\n else:\n monitor_val = None\n \n # training log information\n train_log = ''\n\n # prepare for DataParallism if available\n # pytorch.org/tutorials/beginner/blitz/data_parallel_tutorial.html\n if torch.cuda.device_count() > 1 and args.multi_gpu_data_parallel:\n flag_multi_device = True \n nii_display.f_print(\"\\nUse %d GPUs\\n\" % (torch.cuda.device_count()))\n # no way to call normtarget_f after pt_model is in DataParallel\n normtarget_f = pt_model.normalize_target\n pt_model = nn.DataParallel(pt_model)\n else:\n nii_display.f_print(\"\\nUse single GPU: %s\\n\" % \\\n (torch.cuda.get_device_name(device)))\n flag_multi_device = False\n normtarget_f = None\n pt_model.to(device, 
dtype=nii_dconf.d_dtype)\n\n # print the network\n nii_nn_tools.f_model_show(pt_model)\n nii_nn_tools.f_loss_show(loss_wrapper)\n\n ###############################\n ## Resume training if necessary\n ###############################\n # resume training or initialize the model if necessary\n cp_names = nii_nn_manage_conf.CheckPointKey()\n if checkpoint is not None:\n if type(checkpoint) is dict:\n # checkpoint\n\n # load model parameter and optimizer state\n if cp_names.state_dict in checkpoint:\n # wrap the state_dic in f_state_dict_wrapper \n # in case the model is saved when DataParallel is on\n pt_model.load_state_dict(\n nii_nn_tools.f_state_dict_wrapper(\n checkpoint[cp_names.state_dict], \n flag_multi_device))\n\n # load optimizer state\n if cp_names.optimizer in checkpoint and \\\n not args.ignore_optimizer_statistics_in_trained_model:\n optimizer.load_state_dict(checkpoint[cp_names.optimizer])\n \n # optionally, load training history\n if not args.ignore_training_history_in_trained_model:\n #nii_display.f_print(\"Load \")\n if cp_names.trnlog in checkpoint:\n monitor_trn.load_state_dic(\n checkpoint[cp_names.trnlog])\n if cp_names.vallog in checkpoint and monitor_val:\n monitor_val.load_state_dic(\n checkpoint[cp_names.vallog])\n if cp_names.info in checkpoint:\n train_log = checkpoint[cp_names.info]\n if cp_names.lr_scheduler in checkpoint and \\\n checkpoint[cp_names.lr_scheduler] and lr_scheduler.f_valid():\n lr_scheduler.f_load_state_dict(\n checkpoint[cp_names.lr_scheduler])\n \n nii_display.f_print(\"Load check point, resume training\")\n else:\n nii_display.f_print(\"Load pretrained model and optimizer\")\n else:\n # only model status\n pt_model.load_state_dict(\n nii_nn_tools.f_state_dict_wrapper(\n checkpoint, flag_multi_device))\n nii_display.f_print(\"Load pretrained model\")\n \n\n ######################\n ### User defined setup \n ######################\n if hasattr(pt_model, \"other_setups\"):\n nii_display.f_print(\"Conduct User-defined setup\")\n pt_model.other_setups()\n \n # This should be merged with other_setups\n if hasattr(pt_model, \"g_pretrained_model_path\") and \\\n hasattr(pt_model, \"g_pretrained_model_prefix\"):\n nii_display.f_print(\"Load pre-trained models as part of this model\")\n nii_nn_tools.f_load_pretrained_model_partially(\n pt_model, pt_model.g_pretrained_model_path, \n pt_model.g_pretrained_model_prefix)\n \n ######################\n ### Start training\n ######################\n # other variables\n flag_early_stopped = False\n start_epoch = monitor_trn.get_epoch()\n epoch_num = monitor_trn.get_max_epoch()\n\n # print\n _ = nii_op_display_tk.print_log_head()\n nii_display.f_print_message(train_log, flush=True, end='')\n \n \n # loop over multiple epochs\n for epoch_idx in range(start_epoch, epoch_num):\n\n # training one epoch\n pt_model.train()\n # set validation flag if necessary\n if hasattr(pt_model, 'validation'):\n pt_model.validation = False\n mes = \"Warning: model.validation is deprecated, \"\n mes += \"please use model.flag_validation\"\n nii_display.f_print(mes, 'warning')\n if hasattr(pt_model, 'flag_validation'):\n pt_model.flag_validation = False\n\n f_run_one_epoch(args, pt_model, loss_wrapper, device, \\\n monitor_trn, train_data_loader, \\\n epoch_idx, optimizer, normtarget_f)\n time_trn = monitor_trn.get_time(epoch_idx)\n loss_trn = monitor_trn.get_loss(epoch_idx)\n \n # if necessary, do validation \n if val_dataset_wrapper is not None:\n # set eval() if necessary \n if args.eval_mode_for_validation:\n pt_model.eval()\n\n # 
set validation flag if necessary\n if hasattr(pt_model, 'validation'):\n pt_model.validation = True\n mes = \"Warning: model.validation is deprecated, \"\n mes += \"please use model.flag_validation\"\n nii_display.f_print(mes, 'warning')\n if hasattr(pt_model, 'flag_validation'):\n pt_model.flag_validation = True\n\n with torch.no_grad():\n f_run_one_epoch(args, pt_model, loss_wrapper, \\\n device, \\\n monitor_val, val_data_loader, \\\n epoch_idx, None, normtarget_f)\n time_val = monitor_val.get_time(epoch_idx)\n loss_val = monitor_val.get_loss(epoch_idx)\n \n # update lr rate scheduler if necessary\n if lr_scheduler.f_valid():\n lr_scheduler.f_step(loss_val)\n\n else:\n time_val, loss_val = 0, 0\n \n \n if val_dataset_wrapper is not None:\n flag_new_best = monitor_val.is_new_best()\n else:\n flag_new_best = True\n \n # print information\n train_log += nii_op_display_tk.print_train_info(\n epoch_idx, time_trn, loss_trn, time_val, loss_val, \n flag_new_best, optimizer_wrapper.get_lr_info())\n\n # save the best model\n if flag_new_best:\n tmp_best_name = nii_nn_tools.f_save_trained_name(args)\n torch.save(pt_model.state_dict(), tmp_best_name)\n \n # save intermediate model if necessary\n if not args.not_save_each_epoch:\n tmp_model_name = nii_nn_tools.f_save_epoch_name(args, epoch_idx)\n \n if monitor_val is not None:\n tmp_val_log = monitor_val.get_state_dic()\n else:\n tmp_val_log = None\n \n if lr_scheduler.f_valid():\n lr_scheduler_state = lr_scheduler.f_state_dict()\n else:\n lr_scheduler_state = None\n\n # save\n tmp_dic = {\n cp_names.state_dict : pt_model.state_dict(),\n cp_names.info : train_log,\n cp_names.optimizer : optimizer.state_dict(),\n cp_names.trnlog : monitor_trn.get_state_dic(),\n cp_names.vallog : tmp_val_log,\n cp_names.lr_scheduler : lr_scheduler_state\n }\n torch.save(tmp_dic, tmp_model_name)\n if args.verbose == 1:\n nii_display.f_eprint(str(datetime.datetime.now()))\n nii_display.f_eprint(\"Save {:s}\".format(tmp_model_name),\n flush=True)\n \n \n # Early stopping\n # note: if LR scheduler is used, early stopping will be\n # disabled\n if lr_scheduler.f_allow_early_stopping() and \\\n monitor_val is not None and \\\n monitor_val.should_early_stop(no_best_epoch_num):\n flag_early_stopped = True\n break\n \n # loop done \n nii_op_display_tk.print_log_tail()\n if flag_early_stopped:\n nii_display.f_print(\"Training finished by early stopping\")\n else:\n nii_display.f_print(\"Training finished\")\n nii_display.f_print(\"Model is saved to\", end = '')\n nii_display.f_print(\"{}\".format(nii_nn_tools.f_save_trained_name(args)))\n return", "docstring": "\"\"\" \n f_train_wrapper(args, pt_model, loss_wrapper, device, \n optimizer_wrapper\n train_dataset_wrapper, val_dataset_wrapper = None,\n check_point = None):\n A wrapper to run the training process\n\n Args:\n args: argument information given by argparse\n pt_model: pytorch model (torch.nn.Module)\n loss_wrapper: a wrapper over loss function\n loss_wrapper.compute(generated, target) \n device: torch.device(\"cuda\") or torch.device(\"cpu\")\n\n optimizer_wrapper: \n a wrapper over optimizer (defined in op_manager.py)\n optimizer_wrapper.optimizer is torch.optimizer\n \n train_dataset_wrapper: \n a wrapper over training data set (data_io/default_data_io.py)\n train_dataset_wrapper.get_loader() returns torch.DataSetLoader\n \n val_dataset_wrapper: \n a wrapper over validation data set (data_io/default_data_io.py)\n it can be None.\n \n check_point:\n a check_point that stores everything to resume training\n \"\"\"", "url": 
"https://github.com/csun22/Synthetic-Voice-Detection-Vocoder-Artifacts/blob/f67a2714489f39eda34f6347e2617ee3a3df2a6b/core_scripts/nn_manager/nn_manager.py#L192-L470", "sha": "f67a2714489f39eda34f6347e2617ee3a3df2a6b", "code/function": "def f_train_wrapper(args, pt_model, loss_wrapper, device, \\\n optimizer_wrapper, \\\n train_dataset_wrapper, \\\n val_dataset_wrapper = None, \\\n checkpoint = None):\n \n \n nii_display.f_print_w_date(\"Start model training\")\n\n ##############\n ## Preparation\n ##############\n\n # get the optimizer\n optimizer_wrapper.print_info()\n optimizer = optimizer_wrapper.optimizer\n lr_scheduler = optimizer_wrapper.lr_scheduler\n epoch_num = optimizer_wrapper.get_epoch_num()\n no_best_epoch_num = optimizer_wrapper.get_no_best_epoch_num()\n \n # get data loader for training set\n train_dataset_wrapper.print_info()\n train_data_loader = train_dataset_wrapper.get_loader()\n train_seq_num = train_dataset_wrapper.get_seq_num()\n\n # get the training process monitor\n monitor_trn = nii_monitor.Monitor(epoch_num, train_seq_num)\n\n # if validation data is provided, get data loader for val set\n if val_dataset_wrapper is not None:\n val_dataset_wrapper.print_info()\n val_data_loader = val_dataset_wrapper.get_loader()\n val_seq_num = val_dataset_wrapper.get_seq_num()\n monitor_val = nii_monitor.Monitor(epoch_num, val_seq_num)\n else:\n monitor_val = None\n \n # training log information\n train_log = ''\n\n # prepare for DataParallism if available\n # pytorch.org/tutorials/beginner/blitz/data_parallel_tutorial.html\n if torch.cuda.device_count() > 1 and args.multi_gpu_data_parallel:\n flag_multi_device = True \n nii_display.f_print(\"\\nUse %d GPUs\\n\" % (torch.cuda.device_count()))\n # no way to call normtarget_f after pt_model is in DataParallel\n normtarget_f = pt_model.normalize_target\n pt_model = nn.DataParallel(pt_model)\n else:\n nii_display.f_print(\"\\nUse single GPU: %s\\n\" % \\\n (torch.cuda.get_device_name(device)))\n flag_multi_device = False\n normtarget_f = None\n pt_model.to(device, dtype=nii_dconf.d_dtype)\n\n # print the network\n nii_nn_tools.f_model_show(pt_model)\n nii_nn_tools.f_loss_show(loss_wrapper)\n\n ###############################\n ## Resume training if necessary\n ###############################\n # resume training or initialize the model if necessary\n cp_names = nii_nn_manage_conf.CheckPointKey()\n if checkpoint is not None:\n if type(checkpoint) is dict:\n # checkpoint\n\n # load model parameter and optimizer state\n if cp_names.state_dict in checkpoint:\n # wrap the state_dic in f_state_dict_wrapper \n # in case the model is saved when DataParallel is on\n pt_model.load_state_dict(\n nii_nn_tools.f_state_dict_wrapper(\n checkpoint[cp_names.state_dict], \n flag_multi_device))\n\n # load optimizer state\n if cp_names.optimizer in checkpoint and \\\n not args.ignore_optimizer_statistics_in_trained_model:\n optimizer.load_state_dict(checkpoint[cp_names.optimizer])\n \n # optionally, load training history\n if not args.ignore_training_history_in_trained_model:\n #nii_display.f_print(\"Load \")\n if cp_names.trnlog in checkpoint:\n monitor_trn.load_state_dic(\n checkpoint[cp_names.trnlog])\n if cp_names.vallog in checkpoint and monitor_val:\n monitor_val.load_state_dic(\n checkpoint[cp_names.vallog])\n if cp_names.info in checkpoint:\n train_log = checkpoint[cp_names.info]\n if cp_names.lr_scheduler in checkpoint and \\\n checkpoint[cp_names.lr_scheduler] and lr_scheduler.f_valid():\n lr_scheduler.f_load_state_dict(\n 
checkpoint[cp_names.lr_scheduler])\n \n nii_display.f_print(\"Load check point, resume training\")\n else:\n nii_display.f_print(\"Load pretrained model and optimizer\")\n else:\n # only model status\n pt_model.load_state_dict(\n nii_nn_tools.f_state_dict_wrapper(\n checkpoint, flag_multi_device))\n nii_display.f_print(\"Load pretrained model\")\n \n\n ######################\n ### User defined setup \n ######################\n if hasattr(pt_model, \"other_setups\"):\n nii_display.f_print(\"Conduct User-defined setup\")\n pt_model.other_setups()\n \n # This should be merged with other_setups\n if hasattr(pt_model, \"g_pretrained_model_path\") and \\\n hasattr(pt_model, \"g_pretrained_model_prefix\"):\n nii_display.f_print(\"Load pre-trained models as part of this model\")\n nii_nn_tools.f_load_pretrained_model_partially(\n pt_model, pt_model.g_pretrained_model_path, \n pt_model.g_pretrained_model_prefix)\n \n ######################\n ### Start training\n ######################\n # other variables\n flag_early_stopped = False\n start_epoch = monitor_trn.get_epoch()\n epoch_num = monitor_trn.get_max_epoch()\n\n # print\n _ = nii_op_display_tk.print_log_head()\n nii_display.f_print_message(train_log, flush=True, end='')\n \n \n # loop over multiple epochs\n for epoch_idx in range(start_epoch, epoch_num):\n\n # training one epoch\n pt_model.train()\n # set validation flag if necessary\n if hasattr(pt_model, 'validation'):\n pt_model.validation = False\n mes = \"Warning: model.validation is deprecated, \"\n mes += \"please use model.flag_validation\"\n nii_display.f_print(mes, 'warning')\n if hasattr(pt_model, 'flag_validation'):\n pt_model.flag_validation = False\n\n f_run_one_epoch(args, pt_model, loss_wrapper, device, \\\n monitor_trn, train_data_loader, \\\n epoch_idx, optimizer, normtarget_f)\n time_trn = monitor_trn.get_time(epoch_idx)\n loss_trn = monitor_trn.get_loss(epoch_idx)\n \n # if necessary, do validation \n if val_dataset_wrapper is not None:\n # set eval() if necessary \n if args.eval_mode_for_validation:\n pt_model.eval()\n\n # set validation flag if necessary\n if hasattr(pt_model, 'validation'):\n pt_model.validation = True\n mes = \"Warning: model.validation is deprecated, \"\n mes += \"please use model.flag_validation\"\n nii_display.f_print(mes, 'warning')\n if hasattr(pt_model, 'flag_validation'):\n pt_model.flag_validation = True\n\n with torch.no_grad():\n f_run_one_epoch(args, pt_model, loss_wrapper, \\\n device, \\\n monitor_val, val_data_loader, \\\n epoch_idx, None, normtarget_f)\n time_val = monitor_val.get_time(epoch_idx)\n loss_val = monitor_val.get_loss(epoch_idx)\n \n # update lr rate scheduler if necessary\n if lr_scheduler.f_valid():\n lr_scheduler.f_step(loss_val)\n\n else:\n time_val, loss_val = 0, 0\n \n \n if val_dataset_wrapper is not None:\n flag_new_best = monitor_val.is_new_best()\n else:\n flag_new_best = True\n \n # print information\n train_log += nii_op_display_tk.print_train_info(\n epoch_idx, time_trn, loss_trn, time_val, loss_val, \n flag_new_best, optimizer_wrapper.get_lr_info())\n\n # save the best model\n if flag_new_best:\n tmp_best_name = nii_nn_tools.f_save_trained_name(args)\n torch.save(pt_model.state_dict(), tmp_best_name)\n \n # save intermediate model if necessary\n if not args.not_save_each_epoch:\n tmp_model_name = nii_nn_tools.f_save_epoch_name(args, epoch_idx)\n \n if monitor_val is not None:\n tmp_val_log = monitor_val.get_state_dic()\n else:\n tmp_val_log = None\n \n if lr_scheduler.f_valid():\n lr_scheduler_state = 
lr_scheduler.f_state_dict()\n else:\n lr_scheduler_state = None\n\n # save\n tmp_dic = {\n cp_names.state_dict : pt_model.state_dict(),\n cp_names.info : train_log,\n cp_names.optimizer : optimizer.state_dict(),\n cp_names.trnlog : monitor_trn.get_state_dic(),\n cp_names.vallog : tmp_val_log,\n cp_names.lr_scheduler : lr_scheduler_state\n }\n torch.save(tmp_dic, tmp_model_name)\n if args.verbose == 1:\n nii_display.f_eprint(str(datetime.datetime.now()))\n nii_display.f_eprint(\"Save {:s}\".format(tmp_model_name),\n flush=True)\n \n \n # Early stopping\n # note: if LR scheduler is used, early stopping will be\n # disabled\n if lr_scheduler.f_allow_early_stopping() and \\\n monitor_val is not None and \\\n monitor_val.should_early_stop(no_best_epoch_num):\n flag_early_stopped = True\n break\n \n # loop done \n nii_op_display_tk.print_log_tail()\n if flag_early_stopped:\n nii_display.f_print(\"Training finished by early stopping\")\n else:\n nii_display.f_print(\"Training finished\")\n nii_display.f_print(\"Model is saved to\", end = '')\n nii_display.f_print(\"{}\".format(nii_nn_tools.f_save_trained_name(args)))\n return"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AnsiArtDocument.decode_based_on_file_extension", "code": "@staticmethod\n def decode_based_on_file_extension(content: bytes, file_path: str, default_bg: str = \"#ffffff\", default_fg: str = \"#000000\") -> 'AnsiArtDocument':\n \"\"\"Creates a document from the given bytes, detecting the file format.\n\n Raises FormatReadNotSupported if the file format is not supported for reading. Some are write-only.\n Raises UnicodeDecodeError, which can be a very long message, so make sure to handle it!\n Raises UnidentifiedImageError if the format is not detected.\n \"\"\"\n format_id = AnsiArtDocument.format_from_extension(file_path)\n # print(\"Supported image formats for reading:\", Image.OPEN.keys())\n # TODO: try loading as image first, then as text if that fails with UnidentifiedImageError\n # That way it can handle images without file extensions.\n if format_id in Image.OPEN:\n return AnsiArtDocument.from_image_format(content)\n elif format_id == \"ANSI\":\n return AnsiArtDocument.from_ansi(content.decode('utf-8'), default_bg, default_fg)\n elif format_id == \"IRC\":\n return AnsiArtDocument.from_irc(content.decode('utf-8'), default_bg, default_fg)\n elif format_id == \"PLAINTEXT\":\n return AnsiArtDocument.from_plain(content.decode('utf-8'), default_bg, default_fg)\n elif format_id == \"SVG\":\n return AnsiArtDocument.from_svg(content.decode('utf-8'), default_bg, default_fg)\n elif format_id in Image.SAVE or format_id in [\"HTML\", \"RICH_CONSOLE_MARKUP\"]:\n # This is a write-only format.\n raise FormatReadNotSupported(localized_message=_(\"Cannot read files saved as %1 format.\", format_id))\n else:\n # This is an unknown format.\n # For now at least, I'm preserving the behavior of loading as ANSI/PLAINTEXT.\n return AnsiArtDocument.from_text(content.decode('utf-8'), default_bg, default_fg)", "docstring": "\"\"\"Creates a document from the given bytes, detecting the file format.\n\n Raises FormatReadNotSupported if the file format is not supported for reading. 
Some are write-only.\n Raises UnicodeDecodeError, which can be a very long message, so make sure to handle it!\n Raises UnidentifiedImageError if the format is not detected.\n \"\"\"", "url": "https://github.com/1j01/textual-paint/blob/d61de649a6a3a660d2b024e4259d99acbd45116b/src/textual_paint/ansi_art_document.py#L1033-L1061", "sha": "d61de649a6a3a660d2b024e4259d99acbd45116b", "code/function": "@staticmethod\n def decode_based_on_file_extension(content: bytes, file_path: str, default_bg: str = \"#ffffff\", default_fg: str = \"#000000\") -> 'AnsiArtDocument':\n \n format_id = AnsiArtDocument.format_from_extension(file_path)\n # print(\"Supported image formats for reading:\", Image.OPEN.keys())\n # TODO: try loading as image first, then as text if that fails with UnidentifiedImageError\n # That way it can handle images without file extensions.\n if format_id in Image.OPEN:\n return AnsiArtDocument.from_image_format(content)\n elif format_id == \"ANSI\":\n return AnsiArtDocument.from_ansi(content.decode('utf-8'), default_bg, default_fg)\n elif format_id == \"IRC\":\n return AnsiArtDocument.from_irc(content.decode('utf-8'), default_bg, default_fg)\n elif format_id == \"PLAINTEXT\":\n return AnsiArtDocument.from_plain(content.decode('utf-8'), default_bg, default_fg)\n elif format_id == \"SVG\":\n return AnsiArtDocument.from_svg(content.decode('utf-8'), default_bg, default_fg)\n elif format_id in Image.SAVE or format_id in [\"HTML\", \"RICH_CONSOLE_MARKUP\"]:\n # This is a write-only format.\n raise FormatReadNotSupported(localized_message=_(\"Cannot read files saved as %1 format.\", format_id))\n else:\n # This is an unknown format.\n # For now at least, I'm preserving the behavior of loading as ANSI/PLAINTEXT.\n return AnsiArtDocument.from_text(content.decode('utf-8'), default_bg, default_fg)"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "update", "code": "async def update(self, db: AsyncSession, dept_id: int, obj_in: UpdateDeptParam) -> int:\n \"\"\"\n Update department\n\n :param db:\n :param dept_id:\n :param obj_in:\n :return:\n \"\"\"\n return await self.update_model(db, dept_id, obj_in)", "docstring": "\"\"\"\n Update department\n\n :param db:\n :param dept_id:\n :param obj_in:\n :return:\n \"\"\"", "url": "https://github.com/fastapi-practices/fastapi_best_architecture/blob/1d1a9175801291ae614d983b1e77c3455bb0839c/backend/app/admin/crud/crud_dept.py#L69-L78", "sha": "1d1a9175801291ae614d983b1e77c3455bb0839c", "code/function": "async def update(self, db: AsyncSession, dept_id: int, obj_in: UpdateDeptParam) -> int:\n \n return await self.update_model(db, dept_id, obj_in)"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "enable", "code": "def enable(self, scene: Scene = None):\n \"\"\" Actually loads the learnable params into the scene nodes attr\n\n Args:\n scene (Scene, optional): An optional target scene to load the learnable params.\n If not provided, `self.scene` will be used. Defaults to None.\n \"\"\"\n self.is_enabled = True\n scene = self.scene or scene\n if self.refine_ego_motion:\n for node_id, transform in self['ego_motion'].items():\n node = scene.all_nodes[node_id]\n # NOTE: Here, 'transform' of node.frame_data.subattr will be set to \\\n # the handle of the learnable 'transform' nn.Module here. 
\n # This allows the learnable transform to be used with gradients during the rendering process, \n # making the learnable param here part of the computation graph.\n node.frame_data.subattr['transform'] = transform\n\n if self.refine_other_motion:\n for node_id, transform in self['other_motion'].items():\n node = scene.all_nodes[node_id]\n node.frame_data.subattr['transform'] = transform\n\n if self.refine_camera_intr:\n pass\n \n if self.refine_camera_extr:\n pass\n\n if self.refine_sensor_ts:\n for node_id, ts in self['sensor_ts'].items():\n node = scene.all_nodes[node_id]\n node.frame_data.subattr['global_ts'] = ts", "docstring": "\"\"\" Actually loads the learnable params into the scene nodes attr\n\n Args:\n scene (Scene, optional): An optional target scene to load the learnable params.\n If not provided, `self.scene` will be used. Defaults to None.\n \"\"\"", "url": "https://github.com/PJLab-ADG/neuralsim/blob/faba099e0feb11ea0089490a5e87565e25bc4a2c/app/models/scene/learnable_params.py#L194-L226", "sha": "faba099e0feb11ea0089490a5e87565e25bc4a2c", "code/function": "def enable(self, scene: Scene = None):\n \n self.is_enabled = True\n scene = self.scene or scene\n if self.refine_ego_motion:\n for node_id, transform in self['ego_motion'].items():\n node = scene.all_nodes[node_id]\n # NOTE: Here, 'transform' of node.frame_data.subattr will be set to \\\n # the handle of the learnable 'transform' nn.Module here. \n # This allows the learnable transform to be used with gradients during the rendering process, \n # making the learnable param here part of the computation graph.\n node.frame_data.subattr['transform'] = transform\n\n if self.refine_other_motion:\n for node_id, transform in self['other_motion'].items():\n node = scene.all_nodes[node_id]\n node.frame_data.subattr['transform'] = transform\n\n if self.refine_camera_intr:\n pass\n \n if self.refine_camera_extr:\n pass\n\n if self.refine_sensor_ts:\n for node_id, ts in self['sensor_ts'].items():\n node = scene.all_nodes[node_id]\n node.frame_data.subattr['global_ts'] = ts"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TripleExtract.find_subject_predicate_object_with_chunk", "code": "@staticmethod\n def find_subject_predicate_object_with_chunk(sentence, chunk):\n \"\"\"\n Extract subject, predicate, and object from a sentence, using a chunk for context.\n\n Args:\n sentence (str): The input sentence.\n chunk (str): A chunk of text providing context.\n\n Returns:\n tuple: A tuple containing (subject, predicate, object), each as a string or None if not found.\n\n Raises:\n ValueError: If the input sentence or chunk is not a string or is empty.\n Exception: For any other unexpected errors during processing.\n \"\"\"\n if not isinstance(sentence, str) or not sentence.strip():\n raise ValueError(\"Input sentence must be a non-empty string\")\n if not isinstance(chunk, str) or not chunk.strip():\n raise ValueError(\"Input chunk must be a non-empty string\")\n\n try:\n doc = nlp(chunk) # Process the chunk for context\n sentence_doc = nlp(sentence) # Process the sentence separately\n\n # Identify named entities using SpaCy NER\n entities = [ent for ent in doc.ents if ent.label_ in (\"PERSON\", \"ORG\")]\n\n subject, predicate, _object = None, None, None\n\n # Use syntactic dependency labels to identify subject and verb\n for token in sentence_doc:\n if token.dep_ == \"nsubj\" or token.dep_ == \"nsubjpass\":\n subject = token\n\n # Filter irrelevant words based on POS tags and additional stop words\n if 
subject and isinstance(subject, spacy.tokens.Doc):\n filtered_subject_words = [\n word.text\n for word in subject.words\n if word.pos_ not in [\"STOP\", \"ADP\", \"DET\", \"AUX\"] # Add AUX for auxiliary verbs\n ]\n else:\n filtered_subject_words = [subject.text] if subject else None\n\n # Join the filtered words with a space\n subject_text = \" \".join(filtered_subject_words) if filtered_subject_words else None\n print(f\"\\nDEBUG CHUNK: \\nFiltered subject words: {filtered_subject_words}\\nSubject text: {subject_text}\\n\")\n\n elif token.pos_ == \"VERB\":\n # Check if it's part of a verb phrase indicating the predicate\n if token.dep_ == \"aux\" and nlp(token.head.text).pos_ == \"VERB\":\n continue # Skip auxiliary verbs\n else:\n predicate = token.head # Consider the head of the verb phrase as the predicate\n break\n\n # If subject not found directly, explore other possibilities\n if not subject:\n if predicate:\n # Check for subject within relative clauses or previous entities\n for child in predicate.children:\n if child.dep_ == \"relcl\":\n subject = TripleExtract.find_subject_in_clause_with_chunk(child,\n entities.copy()) # Pass a copy of entities\n if subject:\n break\n elif child.dep_ == \"pobj\" and len(entities) > 0:\n # Check if object from previous sentence is the subject\n for entity in entities:\n if entity.text == child.text:\n subject = entity\n break\n else:\n # Try finding a verb phrase as the subject\n for chunk in doc.noun_chunks:\n if any(token.pos_ == \"VERB\" for token in chunk):\n subject = chunk\n break\n\n # Look for candidate objects after finding subject and predicate\n if subject and predicate:\n for child in predicate.children:\n if child.dep_ in [\"dobj\", \"attr\", \"iobj\"]:\n for grandchild in child.children: # Iterate over child.children\n if grandchild.dep_ == \"pobj\":\n _object = grandchild\n break # Stop iterating after finding the object\n elif child.dep_ == \"prep\":\n for grandchild in child.children:\n if grandchild.dep_ == \"pobj\":\n _object = grandchild\n break # Stop iterating after finding the object\n\n # Convert identified tokens to text if they exist\n subject_text = subject.text if subject else None\n predicate_text = predicate.lemma_ if predicate else None # Using lemma for base form of verb\n object_text = _object.text if _object else None\n\n print(\n f\"usingContext:\\nSubject: {subject_text}\\nPredicate: {predicate_text}\\nObject: {object_text}\")\n return subject_text, predicate_text, object_text\n\n except Exception as e:\n raise Exception(f\"An error occurred while processing the sentence with chunk: {str(e)}\")", "docstring": "\"\"\"\n Extract subject, predicate, and object from a sentence, using a chunk for context.\n\n Args:\n sentence (str): The input sentence.\n chunk (str): A chunk of text providing context.\n\n Returns:\n tuple: A tuple containing (subject, predicate, object), each as a string or None if not found.\n\n Raises:\n ValueError: If the input sentence or chunk is not a string or is empty.\n Exception: For any other unexpected errors during processing.\n \"\"\"", "url": "https://github.com/DataBassGit/AgentForge/blob/feabdd0febe7172e1b99b5dcacc1f7138847f7e3/src/agentforge/tools/triple_extract.py#L137-L241", "sha": "feabdd0febe7172e1b99b5dcacc1f7138847f7e3", "code/function": "@staticmethod\n def find_subject_predicate_object_with_chunk(sentence, chunk):\n \n if not isinstance(sentence, str) or not sentence.strip():\n raise ValueError(\"Input sentence must be a non-empty string\")\n if not isinstance(chunk, str) 
or not chunk.strip():\n raise ValueError(\"Input chunk must be a non-empty string\")\n\n try:\n doc = nlp(chunk) # Process the chunk for context\n sentence_doc = nlp(sentence) # Process the sentence separately\n\n # Identify named entities using SpaCy NER\n entities = [ent for ent in doc.ents if ent.label_ in (\"PERSON\", \"ORG\")]\n\n subject, predicate, _object = None, None, None\n\n # Use syntactic dependency labels to identify subject and verb\n for token in sentence_doc:\n if token.dep_ == \"nsubj\" or token.dep_ == \"nsubjpass\":\n subject = token\n\n # Filter irrelevant words based on POS tags and additional stop words\n if subject and isinstance(subject, spacy.tokens.Doc):\n filtered_subject_words = [\n word.text\n for word in subject.words\n if word.pos_ not in [\"STOP\", \"ADP\", \"DET\", \"AUX\"] # Add AUX for auxiliary verbs\n ]\n else:\n filtered_subject_words = [subject.text] if subject else None\n\n # Join the filtered words with a space\n subject_text = \" \".join(filtered_subject_words) if filtered_subject_words else None\n print(f\"\\nDEBUG CHUNK: \\nFiltered subject words: {filtered_subject_words}\\nSubject text: {subject_text}\\n\")\n\n elif token.pos_ == \"VERB\":\n # Check if it's part of a verb phrase indicating the predicate\n if token.dep_ == \"aux\" and nlp(token.head.text).pos_ == \"VERB\":\n continue # Skip auxiliary verbs\n else:\n predicate = token.head # Consider the head of the verb phrase as the predicate\n break\n\n # If subject not found directly, explore other possibilities\n if not subject:\n if predicate:\n # Check for subject within relative clauses or previous entities\n for child in predicate.children:\n if child.dep_ == \"relcl\":\n subject = TripleExtract.find_subject_in_clause_with_chunk(child,\n entities.copy()) # Pass a copy of entities\n if subject:\n break\n elif child.dep_ == \"pobj\" and len(entities) > 0:\n # Check if object from previous sentence is the subject\n for entity in entities:\n if entity.text == child.text:\n subject = entity\n break\n else:\n # Try finding a verb phrase as the subject\n for chunk in doc.noun_chunks:\n if any(token.pos_ == \"VERB\" for token in chunk):\n subject = chunk\n break\n\n # Look for candidate objects after finding subject and predicate\n if subject and predicate:\n for child in predicate.children:\n if child.dep_ in [\"dobj\", \"attr\", \"iobj\"]:\n for grandchild in child.children: # Iterate over child.children\n if grandchild.dep_ == \"pobj\":\n _object = grandchild\n break # Stop iterating after finding the object\n elif child.dep_ == \"prep\":\n for grandchild in child.children:\n if grandchild.dep_ == \"pobj\":\n _object = grandchild\n break # Stop iterating after finding the object\n\n # Convert identified tokens to text if they exist\n subject_text = subject.text if subject else None\n predicate_text = predicate.lemma_ if predicate else None # Using lemma for base form of verb\n object_text = _object.text if _object else None\n\n print(\n f\"usingContext:\\nSubject: {subject_text}\\nPredicate: {predicate_text}\\nObject: {object_text}\")\n return subject_text, predicate_text, object_text\n\n except Exception as e:\n raise Exception(f\"An error occurred while processing the sentence with chunk: {str(e)}\")"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "test_complete_chat_sse_kwargs", "code": "def test_complete_chat_sse_kwargs(self):\n \"\"\"\n Tests that the StreamingClaudeWrapper can be used to perform streaming chat completion with kwargs.\n \"\"\"\n 
fixture = StreamingClaudeWrapper(anthropic_api_key, model=\"claude-v1\", format_sse=True, append_stop_token=False,\n temperature=0.9, top_k=2)\n\n test_streaming_complete_chat_sse(self, fixture, check_stop=False, verbose=False)", "docstring": "\"\"\"\n Tests that the StreamingClaudeWrapper can be used to perform streaming chat completion with kwargs.\n \"\"\"", "url": "https://github.com/wgryc/phasellm/blob/974d026dc649e4a71da4c25bf8c934622e56cf5d/tests/e2e/llms/test_e2e_llms.py#L624-L631", "sha": "974d026dc649e4a71da4c25bf8c934622e56cf5d", "code/function": "def test_complete_chat_sse_kwargs(self):\n \n fixture = StreamingClaudeWrapper(anthropic_api_key, model=\"claude-v1\", format_sse=True, append_stop_token=False,\n temperature=0.9, top_k=2)\n\n test_streaming_complete_chat_sse(self, fixture, check_stop=False, verbose=False)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_rel_pos", "code": "def get_rel_pos(q_size: int, k_size: int, rel_pos: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Get relative positional embeddings according to the relative positions of\n query and key sizes.\n Args:\n q_size (int): size of query q.\n k_size (int): size of key k.\n rel_pos (Tensor): relative position embeddings (L, C).\n\n Returns:\n Extracted positional embeddings according to relative positions.\n \"\"\"\n max_rel_dist = int(2 * max(q_size, k_size) - 1)\n # Interpolate rel pos if needed.\n if rel_pos.shape[0] != max_rel_dist:\n # Interpolate rel pos.\n rel_pos_resized = F.interpolate(\n rel_pos.reshape(1, rel_pos.shape[0], -1).permute(0, 2, 1),\n size=max_rel_dist,\n mode=\"linear\",\n )\n rel_pos_resized = rel_pos_resized.reshape(-1, max_rel_dist).permute(1, 0)\n else:\n rel_pos_resized = rel_pos\n\n # Scale the coords with short length if shapes for q and k are different.\n q_coords = torch.arange(q_size)[:, None] * max(k_size / q_size, 1.0)\n k_coords = torch.arange(k_size)[None, :] * max(q_size / k_size, 1.0)\n relative_coords = (q_coords - k_coords) + (k_size - 1) * max(q_size / k_size, 1.0)\n\n return rel_pos_resized[relative_coords.long()]", "docstring": "\"\"\"\n Get relative positional embeddings according to the relative positions of\n query and key sizes.\n Args:\n q_size (int): size of query q.\n k_size (int): size of key k.\n rel_pos (Tensor): relative position embeddings (L, C).\n\n Returns:\n Extracted positional embeddings according to relative positions.\n \"\"\"", "url": "https://github.com/RockeyCoss/Prompt-Segment-Anything/blob/5d1704db7489e79d4cd2a6eed99b7a39d8d5acf0/projects/instance_segment_anything/models/segment_anything/modeling/image_encoder.py#L292-L322", "sha": "5d1704db7489e79d4cd2a6eed99b7a39d8d5acf0", "code/function": "def get_rel_pos(q_size: int, k_size: int, rel_pos: torch.Tensor) -> torch.Tensor:\n \n max_rel_dist = int(2 * max(q_size, k_size) - 1)\n # Interpolate rel pos if needed.\n if rel_pos.shape[0] != max_rel_dist:\n # Interpolate rel pos.\n rel_pos_resized = F.interpolate(\n rel_pos.reshape(1, rel_pos.shape[0], -1).permute(0, 2, 1),\n size=max_rel_dist,\n mode=\"linear\",\n )\n rel_pos_resized = rel_pos_resized.reshape(-1, max_rel_dist).permute(1, 0)\n else:\n rel_pos_resized = rel_pos\n\n # Scale the coords with short length if shapes for q and k are different.\n q_coords = torch.arange(q_size)[:, None] * max(k_size / q_size, 1.0)\n k_coords = torch.arange(k_size)[None, :] * max(q_size / k_size, 1.0)\n relative_coords = (q_coords - k_coords) + (k_size - 1) * max(q_size / k_size, 1.0)\n\n return 
rel_pos_resized[relative_coords.long()]"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "join_path", "code": "def join_path(self, filepath: Union[str, Path],\n *filepaths: Union[str, Path]) -> str:\n \"\"\"Concatenate all file paths.\n\n Join one or more filepath components intelligently. The return value\n is the concatenation of filepath and any members of *filepaths.\n\n Args:\n filepath (str or Path): Path to be concatenated.\n\n Returns:\n str: The result of concatenation.\n \"\"\"\n return self.client.join_path(filepath, *filepaths)", "docstring": "\"\"\"Concatenate all file paths.\n\n Join one or more filepath components intelligently. The return value\n is the concatenation of filepath and any members of *filepaths.\n\n Args:\n filepath (str or Path): Path to be concatenated.\n\n Returns:\n str: The result of concatenation.\n \"\"\"", "url": "https://github.com/IDEA-Research/HumanSD/blob/c5db29dd66a3e40afa8b4bed630f0aa7ea001880/comparison_models/ControlNet/annotator/uniformer/mmcv/fileio/file_client.py#L1079-L1092", "sha": "c5db29dd66a3e40afa8b4bed630f0aa7ea001880", "code/function": "def join_path(self, filepath: Union[str, Path],\n *filepaths: Union[str, Path]) -> str:\n \n return self.client.join_path(filepath, *filepaths)"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "normalize", "code": "def normalize(x, mean=IMAGENET_MEAN, std=IMAGENET_STD, inplace=False):\n \"\"\"Normalize RGB images x per ImageNet stats in BCHW format, i.e. = (x - mean) / std.\"\"\"\n return TF.normalize(x, mean, std, inplace=inplace)", "docstring": "\"\"\"Normalize RGB images x per ImageNet stats in BCHW format, i.e. = (x - mean) / std.\"\"\"", "url": "https://github.com/bingogome/samm/blob/ee627cd5ad43d65d57182a7a1ae0fca3e51a79fd/samm-python-terminal/thirdparty/MedicalSAMAdapter/models/MobileSAMv2/ultralytics/yolo/data/dataloaders/v5augmentations.py#L59-L61", "sha": "ee627cd5ad43d65d57182a7a1ae0fca3e51a79fd", "code/function": "def normalize(x, mean=IMAGENET_MEAN, std=IMAGENET_STD, inplace=False):\n \n return TF.normalize(x, mean, std, inplace=inplace)"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ring_attention_tpu", "code": "@partial(\n\tjax.custom_vjp,\n\tnondiff_argnums=[6, 7, 8, 9, 10, 11],\n)\ndef ring_attention_tpu(\n\tquery: chex.Array,\n\tkey: chex.Array,\n\tvalue: chex.Array,\n\tbias: tp.Optional[chex.Array] = None,\n\tsegment_ids: tp.Optional[SegmentIds] = None,\n\tcache_idx: tp.Optional[int] = None,\n\taxis_name: tp.Optional[str] = None,\n\tfloat32_logits: bool = True,\n\tsoftmax_scale: tp.Optional[float] = None,\n\tblocksize_q: int = 256,\n\tblocksize_k: int = 256,\n\tblocksize_c: tp.Optional[int] = None,\n) -> chex.Array:\n\t\"\"\"Computes ring attention using FlashAttention on TPU.\n\n\tArgs:\n\t query: Query array of shape (batch, query_len, num_heads, dim_per_head).\n\t key: Key array of shape (batch, kv_len, num_heads, dim_per_head).\n\t value: Value array of shape (batch, kv_len, num_heads, dim_per_head).\n\t bias: tp.Optional bias array. 
Its shape depends on the attention mechanism.\n\t segment_ids: tp.Optional segment ids for Q and KV sequences.\n\t cache_idx: tp.Optional cache index for use with caching.\n\t axis_name: tp.Optional name of the axis to ppermute over (for multi-host support).\n\t float32_logits: Whether to compute logits in float32.\n\t softmax_scale: tp.Optional scaling factor for the softmax function.\n\t blocksize_q: Block size for the query sequence.\n\t blocksize_k: Block size for the key/value sequence.\n\t blocksize_c: tp.Optional block size for causal masking.\n\n\n\tReturns:\n\t Output array of shape (batch, query_len, num_heads, dim_per_head).\n\t\"\"\"\n\ty, _ = _ring_flash_attention_fwd_tpu(\n\t\tquery,\n\t\tkey,\n\t\tvalue,\n\t\tbias,\n\t\tsegment_ids,\n\t\tcache_idx,\n\t\taxis_name,\n\t\tfloat32_logits,\n\t\tsoftmax_scale,\n\t\tblocksize_q,\n\t\tblocksize_k,\n\t\tblocksize_c,\n\t)\n\treturn y", "docstring": "\"\"\"Computes ring attention using FlashAttention on TPU.\n\n\tArgs:\n\t query: Query array of shape (batch, query_len, num_heads, dim_per_head).\n\t key: Key array of shape (batch, kv_len, num_heads, dim_per_head).\n\t value: Value array of shape (batch, kv_len, num_heads, dim_per_head).\n\t bias: tp.Optional bias array. Its shape depends on the attention mechanism.\n\t segment_ids: tp.Optional segment ids for Q and KV sequences.\n\t cache_idx: tp.Optional cache index for use with caching.\n\t axis_name: tp.Optional name of the axis to ppermute over (for multi-host support).\n\t float32_logits: Whether to compute logits in float32.\n\t softmax_scale: tp.Optional scaling factor for the softmax function.\n\t blocksize_q: Block size for the query sequence.\n\t blocksize_k: Block size for the key/value sequence.\n\t blocksize_c: tp.Optional block size for causal masking.\n\n\n\tReturns:\n\t Output array of shape (batch, query_len, num_heads, dim_per_head).\n\t\"\"\"", "url": "https://github.com/erfanzar/EasyDeL/blob/104bb42a9cf23050382a53392b677dc4d4b8d579/easydel/kernels/tpu_ops/pallas_ring_attention.py#L323-L375", "sha": "104bb42a9cf23050382a53392b677dc4d4b8d579", "code/function": "@partial(\n\tjax.custom_vjp,\n\tnondiff_argnums=[6, 7, 8, 9, 10, 11],\n)\ndef ring_attention_tpu(\n\tquery: chex.Array,\n\tkey: chex.Array,\n\tvalue: chex.Array,\n\tbias: tp.Optional[chex.Array] = None,\n\tsegment_ids: tp.Optional[SegmentIds] = None,\n\tcache_idx: tp.Optional[int] = None,\n\taxis_name: tp.Optional[str] = None,\n\tfloat32_logits: bool = True,\n\tsoftmax_scale: tp.Optional[float] = None,\n\tblocksize_q: int = 256,\n\tblocksize_k: int = 256,\n\tblocksize_c: tp.Optional[int] = None,\n) -> chex.Array:\n\t\n\ty, _ = _ring_flash_attention_fwd_tpu(\n\t\tquery,\n\t\tkey,\n\t\tvalue,\n\t\tbias,\n\t\tsegment_ids,\n\t\tcache_idx,\n\t\taxis_name,\n\t\tfloat32_logits,\n\t\tsoftmax_scale,\n\t\tblocksize_q,\n\t\tblocksize_k,\n\t\tblocksize_c,\n\t)\n\treturn y"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pack_sequences", "code": "def pack_sequences(\n\tdataset: Dataset,\n\tmax_length: int = 512,\n\tpad_token_id: int = 0,\n\treset_position_ids: bool = False,\n\tnum_proc: tp.Optional[int] = None,\n):\n\t\"\"\"\n\tPack sequences together with their attention masks and position IDs\n\n\t# With continuous position IDs\n\tpacked_dataset = pack_sequences(\n\t\t\tdataset,\n\t\t\tmax_length=512,\n\t\t\tpad_token_id=0,\n\t\t\treset_position_ids=False\n\t)\n\n\t# With reset position IDs for each sequence\n\tpacked_dataset = 
pack_sequences(\n\t\t\tdataset,\n\t\t\tmax_length=512,\n\t\t\tpad_token_id=0,\n\t\t\treset_position_ids=True\n\t)\n\n\t# Example output format for a packed sequence with two sequences:\n\t# reset_position_ids=False:\n\t{\n\t\t\t'input_ids': [seq1_tokens + [PAD] + seq2_tokens + [PAD] + padding],\n\t\t\t'attention_mask': [1,1,1,0,1,1,1,0,0,0],\n\t\t\t'position_ids': [0,1,2,3,4,5,6,7,0,0]\n\t}\n\n\t# reset_position_ids=True:\n\t{\n\t\t\t'input_ids': [seq1_tokens + [PAD] + seq2_tokens + [PAD] + padding],\n\t\t\t'attention_mask': [1,1,1,0,1,1,1,0,0,0],\n\t\t\t'position_ids': [0,1,2,0,0,1,2,0,0,0]\n\t}\n\n\tArgs:\n\t dataset: Dataset containing 'input_ids' and 'attention_mask'\n\t max_length: Maximum length of packed sequence\n\t pad_token_id: Token ID used for padding\n\t reset_position_ids: If True, reset position IDs for each sequence in the pack\n\n\tReturns:\n\t packed_dataset: Dataset with packed sequences, attention masks, and position IDs\n\t\"\"\"\n\n\tdef pack_examples(examples):\n\t\tcurrent_packed_input_ids = []\n\t\tcurrent_packed_attention_mask = []\n\t\tcurrent_packed_position_ids = []\n\t\tcurrent_length = 0\n\n\t\tpacked_input_ids = []\n\t\tpacked_attention_mask = []\n\t\tpacked_position_ids = []\n\n\t\tdef get_position_ids(length, start_position=0):\n\t\t\tif reset_position_ids:\n\t\t\t\treturn list(range(length))\n\t\t\telse:\n\t\t\t\treturn list(range(start_position, start_position + length))\n\n\t\t# Iterate through all examples\n\t\tfor input_ids, attention_mask in zip(\n\t\t\texamples[\"input_ids\"], examples[\"attention_mask\"]\n\t\t):\n\t\t\tseq_length = len(input_ids)\n\n\t\t\t# If adding this sequence would exceed max_length, start a new packed sequence\n\t\t\tif current_length + seq_length + 1 > max_length:\n\t\t\t\t# Pad the current packed sequence if needed\n\t\t\t\tif current_length < max_length:\n\t\t\t\t\tpadding_length = max_length - current_length\n\t\t\t\t\tcurrent_packed_input_ids.extend([pad_token_id] * padding_length)\n\t\t\t\t\tcurrent_packed_attention_mask.extend([0] * padding_length)\n\t\t\t\t\tcurrent_packed_position_ids.extend([0] * padding_length)\n\n\t\t\t\t# Add the completed packed sequence to results\n\t\t\t\tpacked_input_ids.append(current_packed_input_ids)\n\t\t\t\tpacked_attention_mask.append(current_packed_attention_mask)\n\t\t\t\tpacked_position_ids.append(current_packed_position_ids)\n\n\t\t\t\t# Start new packed sequence\n\t\t\t\tcurrent_packed_input_ids = []\n\t\t\t\tcurrent_packed_attention_mask = []\n\t\t\t\tcurrent_packed_position_ids = []\n\t\t\t\tcurrent_length = 0\n\n\t\t\t# Generate position IDs for current sequence\n\t\t\tposition_ids = get_position_ids(seq_length, start_position=current_length)\n\n\t\t\t# Add current sequence\n\t\t\tcurrent_packed_input_ids.extend(input_ids)\n\t\t\tcurrent_packed_attention_mask.extend(attention_mask)\n\t\t\tcurrent_packed_position_ids.extend(position_ids)\n\n\t\t\t# Add separator token\n\t\t\tcurrent_packed_input_ids.append(pad_token_id)\n\t\t\tcurrent_packed_attention_mask.append(0)\n\t\t\tcurrent_packed_position_ids.append(\n\t\t\t\tposition_ids[-1] + 1 if not reset_position_ids else 0\n\t\t\t)\n\n\t\t\tcurrent_length += seq_length + 1\n\n\t\t# Handle the last packed sequence\n\t\tif current_packed_input_ids:\n\t\t\t# Pad if needed\n\t\t\tif current_length < max_length:\n\t\t\t\tpadding_length = max_length - current_length\n\t\t\t\tcurrent_packed_input_ids.extend([pad_token_id] * padding_length)\n\t\t\t\tcurrent_packed_attention_mask.extend([0] * 
padding_length)\n\t\t\t\tcurrent_packed_position_ids.extend([0] * padding_length)\n\n\t\t\tpacked_input_ids.append(current_packed_input_ids)\n\t\t\tpacked_attention_mask.append(current_packed_attention_mask)\n\t\t\tpacked_position_ids.append(current_packed_position_ids)\n\n\t\treturn {\n\t\t\t\"input_ids\": packed_input_ids,\n\t\t\t\"attention_mask\": packed_attention_mask,\n\t\t\t\"position_ids\": packed_position_ids,\n\t\t}\n\n\t# Process the dataset in batches\n\tpacked_dataset = dataset.map(\n\t\tpack_examples,\n\t\tbatched=True,\n\t\tremove_columns=dataset.column_names,\n\t\tdesc=\"Packing sequences\",\n\t\tnum_proc=num_proc,\n\t)\n\n\treturn packed_dataset", "docstring": "\"\"\"\n\tPack sequences together with their attention masks and position IDs\n\n\t# With continuous position IDs\n\tpacked_dataset = pack_sequences(\n\t\t\tdataset,\n\t\t\tmax_length=512,\n\t\t\tpad_token_id=0,\n\t\t\treset_position_ids=False\n\t)\n\n\t# With reset position IDs for each sequence\n\tpacked_dataset = pack_sequences(\n\t\t\tdataset,\n\t\t\tmax_length=512,\n\t\t\tpad_token_id=0,\n\t\t\treset_position_ids=True\n\t)\n\n\t# Example output format for a packed sequence with two sequences:\n\t# reset_position_ids=False:\n\t{\n\t\t\t'input_ids': [seq1_tokens + [PAD] + seq2_tokens + [PAD] + padding],\n\t\t\t'attention_mask': [1,1,1,0,1,1,1,0,0,0],\n\t\t\t'position_ids': [0,1,2,3,4,5,6,7,0,0]\n\t}\n\n\t# reset_position_ids=True:\n\t{\n\t\t\t'input_ids': [seq1_tokens + [PAD] + seq2_tokens + [PAD] + padding],\n\t\t\t'attention_mask': [1,1,1,0,1,1,1,0,0,0],\n\t\t\t'position_ids': [0,1,2,0,0,1,2,0,0,0]\n\t}\n\n\tArgs:\n\t dataset: Dataset containing 'input_ids' and 'attention_mask'\n\t max_length: Maximum length of packed sequence\n\t pad_token_id: Token ID used for padding\n\t reset_position_ids: If True, reset position IDs for each sequence in the pack\n\n\tReturns:\n\t packed_dataset: Dataset with packed sequences, attention masks, and position IDs\n\t\"\"\"", "url": "https://github.com/erfanzar/EasyDeL/blob/104bb42a9cf23050382a53392b677dc4d4b8d579/easydel/trainers/packer.py#L11-L149", "sha": "104bb42a9cf23050382a53392b677dc4d4b8d579", "code/function": "def pack_sequences(\n\tdataset: Dataset,\n\tmax_length: int = 512,\n\tpad_token_id: int = 0,\n\treset_position_ids: bool = False,\n\tnum_proc: tp.Optional[int] = None,\n):\n\t\n\n\tdef pack_examples(examples):\n\t\tcurrent_packed_input_ids = []\n\t\tcurrent_packed_attention_mask = []\n\t\tcurrent_packed_position_ids = []\n\t\tcurrent_length = 0\n\n\t\tpacked_input_ids = []\n\t\tpacked_attention_mask = []\n\t\tpacked_position_ids = []\n\n\t\tdef get_position_ids(length, start_position=0):\n\t\t\tif reset_position_ids:\n\t\t\t\treturn list(range(length))\n\t\t\telse:\n\t\t\t\treturn list(range(start_position, start_position + length))\n\n\t\t# Iterate through all examples\n\t\tfor input_ids, attention_mask in zip(\n\t\t\texamples[\"input_ids\"], examples[\"attention_mask\"]\n\t\t):\n\t\t\tseq_length = len(input_ids)\n\n\t\t\t# If adding this sequence would exceed max_length, start a new packed sequence\n\t\t\tif current_length + seq_length + 1 > max_length:\n\t\t\t\t# Pad the current packed sequence if needed\n\t\t\t\tif current_length < max_length:\n\t\t\t\t\tpadding_length = max_length - current_length\n\t\t\t\t\tcurrent_packed_input_ids.extend([pad_token_id] * padding_length)\n\t\t\t\t\tcurrent_packed_attention_mask.extend([0] * padding_length)\n\t\t\t\t\tcurrent_packed_position_ids.extend([0] * padding_length)\n\n\t\t\t\t# Add the completed packed sequence to 
results\n\t\t\t\tpacked_input_ids.append(current_packed_input_ids)\n\t\t\t\tpacked_attention_mask.append(current_packed_attention_mask)\n\t\t\t\tpacked_position_ids.append(current_packed_position_ids)\n\n\t\t\t\t# Start new packed sequence\n\t\t\t\tcurrent_packed_input_ids = []\n\t\t\t\tcurrent_packed_attention_mask = []\n\t\t\t\tcurrent_packed_position_ids = []\n\t\t\t\tcurrent_length = 0\n\n\t\t\t# Generate position IDs for current sequence\n\t\t\tposition_ids = get_position_ids(seq_length, start_position=current_length)\n\n\t\t\t# Add current sequence\n\t\t\tcurrent_packed_input_ids.extend(input_ids)\n\t\t\tcurrent_packed_attention_mask.extend(attention_mask)\n\t\t\tcurrent_packed_position_ids.extend(position_ids)\n\n\t\t\t# Add separator token\n\t\t\tcurrent_packed_input_ids.append(pad_token_id)\n\t\t\tcurrent_packed_attention_mask.append(0)\n\t\t\tcurrent_packed_position_ids.append(\n\t\t\t\tposition_ids[-1] + 1 if not reset_position_ids else 0\n\t\t\t)\n\n\t\t\tcurrent_length += seq_length + 1\n\n\t\t# Handle the last packed sequence\n\t\tif current_packed_input_ids:\n\t\t\t# Pad if needed\n\t\t\tif current_length < max_length:\n\t\t\t\tpadding_length = max_length - current_length\n\t\t\t\tcurrent_packed_input_ids.extend([pad_token_id] * padding_length)\n\t\t\t\tcurrent_packed_attention_mask.extend([0] * padding_length)\n\t\t\t\tcurrent_packed_position_ids.extend([0] * padding_length)\n\n\t\t\tpacked_input_ids.append(current_packed_input_ids)\n\t\t\tpacked_attention_mask.append(current_packed_attention_mask)\n\t\t\tpacked_position_ids.append(current_packed_position_ids)\n\n\t\treturn {\n\t\t\t\"input_ids\": packed_input_ids,\n\t\t\t\"attention_mask\": packed_attention_mask,\n\t\t\t\"position_ids\": packed_position_ids,\n\t\t}\n\n\t# Process the dataset in batches\n\tpacked_dataset = dataset.map(\n\t\tpack_examples,\n\t\tbatched=True,\n\t\tremove_columns=dataset.column_names,\n\t\tdesc=\"Packing sequences\",\n\t\tnum_proc=num_proc,\n\t)\n\n\treturn packed_dataset"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "solve_heun", "code": "def solve_heun(self, x, t_span, mu, mask, spks, cond, training=False, guidance_scale=0.0):\n \"\"\"\n Fixed heun solver for ODEs.\n Args:\n x (torch.Tensor): random noise\n t_span (torch.Tensor): n_timesteps interpolated\n shape: (n_timesteps + 1,)\n mu (torch.Tensor): output of encoder\n shape: (batch_size, n_feats, mel_timesteps)\n mask (torch.Tensor): output_mask\n shape: (batch_size, 1, mel_timesteps)\n cond: Not used but kept for future purposes\n \"\"\"\n t, _, dt = t_span[0], t_span[-1], t_span[1] - t_span[0]\n\n #-! 
: reserved space for debugger\n sol = []\n steps = 1\n\n while steps <= len(t_span) - 1:\n dphi_dt = self.func_dphi_dt(x, mask, mu, t, spks, cond, training=training, guidance_scale=guidance_scale)\n dphi_dt_2 = self.func_dphi_dt(x + dt * dphi_dt, mask, mu, t+dt, spks, cond, training=training, guidance_scale=guidance_scale)\n \n #- Euler's -> Y'n+1' = Y'n' + h * F(X'n', Y'n')\n # x = x + dt * dphi_dt\n \n #- Heun's -> Y'n+1' = Y'n' + h * 0.5( F(X'n', Y'n') + F(X'n' + h, Y'n' + h * F(X'n', Y'n') ) )\n x = x + dt * 0.5 * (dphi_dt + dphi_dt_2)\n t = t + dt\n\n sol.append(x)\n if steps < len(t_span) - 1:\n dt = t_span[steps + 1] - t\n steps += 1\n\n return sol[-1]", "docstring": "\"\"\"\n Fixed heun solver for ODEs.\n Args:\n x (torch.Tensor): random noise\n t_span (torch.Tensor): n_timesteps interpolated\n shape: (n_timesteps + 1,)\n mu (torch.Tensor): output of encoder\n shape: (batch_size, n_feats, mel_timesteps)\n mask (torch.Tensor): output_mask\n shape: (batch_size, 1, mel_timesteps)\n cond: Not used but kept for future purposes\n \"\"\"", "url": "https://github.com/alphacep/vosk-tts/blob/89b23a8b033133e25e3e7f53d07939645b8ea51c/training/stabletts/matcha/models/components/flow_matching.py#L91-L126", "sha": "89b23a8b033133e25e3e7f53d07939645b8ea51c", "code/function": "def solve_heun(self, x, t_span, mu, mask, spks, cond, training=False, guidance_scale=0.0):\n \n t, _, dt = t_span[0], t_span[-1], t_span[1] - t_span[0]\n\n #-! : reserved space for debugger\n sol = []\n steps = 1\n\n while steps <= len(t_span) - 1:\n dphi_dt = self.func_dphi_dt(x, mask, mu, t, spks, cond, training=training, guidance_scale=guidance_scale)\n dphi_dt_2 = self.func_dphi_dt(x + dt * dphi_dt, mask, mu, t+dt, spks, cond, training=training, guidance_scale=guidance_scale)\n \n #- Euler's -> Y'n+1' = Y'n' + h * F(X'n', Y'n')\n # x = x + dt * dphi_dt\n \n #- Heun's -> Y'n+1' = Y'n' + h * 0.5( F(X'n', Y'n') + F(X'n' + h, Y'n' + h * F(X'n', Y'n') ) )\n x = x + dt * 0.5 * (dphi_dt + dphi_dt_2)\n t = t + dt\n\n sol.append(x)\n if steps < len(t_span) - 1:\n dt = t_span[steps + 1] - t\n steps += 1\n\n return sol[-1]"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "reset", "code": "def reset(self) -> None:\n \"\"\"\n Clear buffer and reset internal counters.\n \"\"\"\n\n self._buffer.clear()\n self._has_item.clear()\n self._prefill = self.prefill\n self._last_tx = self._last_rx = self._generation = self._generation_ts = 0", "docstring": "\"\"\"\n Clear buffer and reset internal counters.\n \"\"\"", "url": "https://github.com/imayhaveborkedit/discord-ext-voice-recv/blob/3398a4d9d2f646cfcd60f68e626cd750b759893f/discord/ext/voice_recv/buffer.py#L202-L210", "sha": "3398a4d9d2f646cfcd60f68e626cd750b759893f", "code/function": "def reset(self) -> None:\n \n\n self._buffer.clear()\n self._has_item.clear()\n self._prefill = self.prefill\n self._last_tx = self._last_rx = self._generation = self._generation_ts = 0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "has_data_stream", "code": "def has_data_stream(arg_type: Type) -> bool:\n \"\"\"Recursive check for a DataStream container in a type annotation\"\"\"\n if _is_data_stream(arg_type):\n return True\n\n typing_args = get_args(arg_type)\n if len(typing_args) > 0:\n for typ in typing_args:\n if has_data_stream(typ):\n return True\n\n return False", "docstring": "\"\"\"Recursive check for a DataStream container in a type annotation\"\"\"", "url": 
"https://github.com/caikit/caikit/blob/ce3fa2c129ce15a5e2095d466a8f01ec2e0c577d/caikit/runtime/service_generation/type_helpers.py#L26-L37", "sha": "ce3fa2c129ce15a5e2095d466a8f01ec2e0c577d", "code/function": "def has_data_stream(arg_type: Type) -> bool:\n \n if _is_data_stream(arg_type):\n return True\n\n typing_args = get_args(arg_type)\n if len(typing_args) > 0:\n for typ in typing_args:\n if has_data_stream(typ):\n return True\n\n return False"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AppResource.app_name", "code": "@property\n def app_name(self):\n \"\"\"Return the app name.\"\"\"\n return self._app_name", "docstring": "\"\"\"Return the app name.\"\"\"", "url": "https://github.com/eosphoros-ai/DB-GPT/blob/0310ce9fa333f14954bed4c4994da5ef419c27c7/dbgpt/agent/resource/app.py#L99-L102", "sha": "0310ce9fa333f14954bed4c4994da5ef419c27c7", "code/function": "@property\n def app_name(self):\n \n return self._app_name"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Document.langchain2doc", "code": "@classmethod\n def langchain2doc(cls, document):\n \"\"\"Transform Langchain to Document format.\"\"\"\n metadata = document.metadata or {}\n return cls(content=document.page_content, metadata=metadata)", "docstring": "\"\"\"Transform Langchain to Document format.\"\"\"", "url": "https://github.com/eosphoros-ai/DB-GPT/blob/0310ce9fa333f14954bed4c4994da5ef419c27c7/dbgpt/core/interface/knowledge.py#L28-L32", "sha": "0310ce9fa333f14954bed4c4994da5ef419c27c7", "code/function": "@classmethod\n def langchain2doc(cls, document):\n \n metadata = document.metadata or {}\n return cls(content=document.page_content, metadata=metadata)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_table_summary", "code": "def get_table_summary(self, table_name):\n \"\"\"Get table summary for table.\n\n example:\n table_name(column1(column1 comment),column2(column2 comment),\n column3(column3 comment) and index keys, and table comment: {table_comment})\n \"\"\"\n return _parse_table_summary(self.db, self.summary_template, table_name)", "docstring": "\"\"\"Get table summary for table.\n\n example:\n table_name(column1(column1 comment),column2(column2 comment),\n column3(column3 comment) and index keys, and table comment: {table_comment})\n \"\"\"", "url": "https://github.com/eosphoros-ai/DB-GPT/blob/0310ce9fa333f14954bed4c4994da5ef419c27c7/dbgpt/rag/summary/rdbms_db_summary.py#L52-L59", "sha": "0310ce9fa333f14954bed4c4994da5ef419c27c7", "code/function": "def get_table_summary(self, table_name):\n \n return _parse_table_summary(self.db, self.summary_template, table_name)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__init__", "code": "def __init__(self):\n \"\"\"Client for vis protocol.\"\"\"\n self._vis_tag: Dict[str, Vis] = {}", "docstring": "\"\"\"Client for vis protocol.\"\"\"", "url": "https://github.com/eosphoros-ai/DB-GPT/blob/0310ce9fa333f14954bed4c4994da5ef419c27c7/dbgpt/vis/client.py#L18-L20", "sha": "0310ce9fa333f14954bed4c4994da5ef419c27c7", "code/function": "def __init__(self):\n \n self._vis_tag: Dict[str, Vis] = {}"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "bin_search", "code": "async def bin_search(\n self, blocks: List[str], model_nam: str, max_new_token: int\n ) -> int:\n \"\"\"Binary search to find the split point.\"\"\"\n l, r = 0, len(blocks) - 1\n while l < r:\n mid = l + r + 1 >> 1\n current_tokens = 
await self._llm_client.count_token(\n model_nam, \"\".join(blocks[: mid + 1])\n )\n if current_tokens <= max_new_token:\n l = mid\n else:\n r = mid - 1\n return r", "docstring": "\"\"\"Binary search to find the split point.\"\"\"", "url": "https://github.com/eosphoros-ai/DB-GPT/blob/0310ce9fa333f14954bed4c4994da5ef419c27c7/i18n/translate_util.py#L300-L314", "sha": "0310ce9fa333f14954bed4c4994da5ef419c27c7", "code/function": "async def bin_search(\n self, blocks: List[str], model_nam: str, max_new_token: int\n ) -> int:\n \n l, r = 0, len(blocks) - 1\n while l < r:\n mid = l + r + 1 >> 1\n current_tokens = await self._llm_client.count_token(\n model_nam, \"\".join(blocks[: mid + 1])\n )\n if current_tokens <= max_new_token:\n l = mid\n else:\n r = mid - 1\n return r"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "stop_recording", "code": "def stop_recording(self) -> None:\n \"\"\"Stop recording.\"\"\"\n Thread(target=stop_record).start()", "docstring": "\"\"\"Stop recording.\"\"\"", "url": "https://github.com/OpenAdaptAI/OpenAdapt/blob/acdbb7b2236fcbb6f8da8e0162d394608d49d33e/openadapt/app/tray.py#L255-L257", "sha": "acdbb7b2236fcbb6f8da8e0162d394608d49d33e", "code/function": "def stop_recording(self) -> None:\n \n Thread(target=stop_record).start()"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_detection_dataset_dicts", "code": "def get_detection_dataset_dicts(\n names,\n filter_empty=True,\n min_keypoints=0,\n proposal_files=None,\n check_consistency=True,\n):\n \"\"\"\n Load and prepare dataset dicts for instance detection/segmentation and semantic segmentation.\n\n Args:\n names (str or list[str]): a dataset name or a list of dataset names\n filter_empty (bool): whether to filter out images without instance annotations\n min_keypoints (int): filter out images with fewer keypoints than\n `min_keypoints`. 
Set to 0 to do nothing.\n proposal_files (list[str]): if given, a list of object proposal files\n that match each dataset in `names`.\n check_consistency (bool): whether to check if datasets have consistent metadata.\n\n Returns:\n list[dict]: a list of dicts following the standard dataset dict format.\n \"\"\"\n if isinstance(names, str):\n names = [names]\n assert len(names), names\n dataset_dicts = [DatasetCatalog.get(dataset_name) for dataset_name in names]\n for dataset_name, dicts in zip(names, dataset_dicts):\n assert len(dicts), \"Dataset '{}' is empty!\".format(dataset_name)\n\n if proposal_files is not None:\n assert len(names) == len(proposal_files)\n # load precomputed proposals from proposal files\n dataset_dicts = [\n load_proposals_into_dataset(dataset_i_dicts, proposal_file)\n for dataset_i_dicts, proposal_file in zip(dataset_dicts, proposal_files)\n ]\n\n if isinstance(dataset_dicts[0], torchdata.Dataset):\n return torchdata.ConcatDataset(dataset_dicts)\n\n dataset_dicts = list(itertools.chain.from_iterable(dataset_dicts))\n\n has_instances = \"annotations\" in dataset_dicts[0]\n if filter_empty and has_instances:\n dataset_dicts = filter_images_with_only_crowd_annotations(dataset_dicts)\n if min_keypoints > 0 and has_instances:\n dataset_dicts = filter_images_with_few_keypoints(dataset_dicts, min_keypoints)\n\n if check_consistency and has_instances:\n try:\n class_names = MetadataCatalog.get(names[0]).thing_classes\n check_metadata_consistency(\"thing_classes\", names)\n print_instances_class_histogram(dataset_dicts, class_names)\n except AttributeError: # class names are not available for this dataset\n pass\n\n assert len(dataset_dicts), \"No valid data found in {}.\".format(\",\".join(names))\n return dataset_dicts", "docstring": "\"\"\"\n Load and prepare dataset dicts for instance detection/segmentation and semantic segmentation.\n\n Args:\n names (str or list[str]): a dataset name or a list of dataset names\n filter_empty (bool): whether to filter out images without instance annotations\n min_keypoints (int): filter out images with fewer keypoints than\n `min_keypoints`. 
Set to 0 to do nothing.\n proposal_files (list[str]): if given, a list of object proposal files\n that match each dataset in `names`.\n check_consistency (bool): whether to check if datasets have consistent metadata.\n\n Returns:\n list[dict]: a list of dicts following the standard dataset dict format.\n \"\"\"", "url": "https://github.com/showlab/Image2Paragraph/blob/a24210a6dd4535a1a43af7a6170fb8ad1e3a6013/models/grit_src/third_party/CenterNet2/detectron2/data/build.py#L216-L273", "sha": "a24210a6dd4535a1a43af7a6170fb8ad1e3a6013", "code/function": "def get_detection_dataset_dicts(\n names,\n filter_empty=True,\n min_keypoints=0,\n proposal_files=None,\n check_consistency=True,\n):\n \n if isinstance(names, str):\n names = [names]\n assert len(names), names\n dataset_dicts = [DatasetCatalog.get(dataset_name) for dataset_name in names]\n for dataset_name, dicts in zip(names, dataset_dicts):\n assert len(dicts), \"Dataset '{}' is empty!\".format(dataset_name)\n\n if proposal_files is not None:\n assert len(names) == len(proposal_files)\n # load precomputed proposals from proposal files\n dataset_dicts = [\n load_proposals_into_dataset(dataset_i_dicts, proposal_file)\n for dataset_i_dicts, proposal_file in zip(dataset_dicts, proposal_files)\n ]\n\n if isinstance(dataset_dicts[0], torchdata.Dataset):\n return torchdata.ConcatDataset(dataset_dicts)\n\n dataset_dicts = list(itertools.chain.from_iterable(dataset_dicts))\n\n has_instances = \"annotations\" in dataset_dicts[0]\n if filter_empty and has_instances:\n dataset_dicts = filter_images_with_only_crowd_annotations(dataset_dicts)\n if min_keypoints > 0 and has_instances:\n dataset_dicts = filter_images_with_few_keypoints(dataset_dicts, min_keypoints)\n\n if check_consistency and has_instances:\n try:\n class_names = MetadataCatalog.get(names[0]).thing_classes\n check_metadata_consistency(\"thing_classes\", names)\n print_instances_class_histogram(dataset_dicts, class_names)\n except AttributeError: # class names are not available for this dataset\n pass\n\n assert len(dataset_dicts), \"No valid data found in {}.\".format(\",\".join(names))\n return dataset_dicts"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "number_of_rotatable_bonds", "code": "def number_of_rotatable_bonds(mol: Mol) -> int:\n \"\"\"\n Number of rotatable bonds.\n\n Calculates the total number of rotatable bonds in the molecule.\n\n Parameters\n ----------\n mol : RDKit ``Mol`` object\n The molecule for which the number of rotatable bonds is to be calculated.\n\n Examples\n --------\n >>> from rdkit.Chem import MolFromSmiles\n >>> from skfp.descriptors import number_of_rotatable_bonds\n >>> mol = MolFromSmiles(\"C=CC=C\") # Butadiene\n >>> number_of_rotatable_bonds(mol)\n 1\n \"\"\"\n return CalcNumRotatableBonds(mol)", "docstring": "\"\"\"\n Number of rotatable bonds.\n\n Calculates the total number of rotatable bonds in the molecule.\n\n Parameters\n ----------\n mol : RDKit ``Mol`` object\n The molecule for which the number of rotatable bonds is to be calculated.\n\n Examples\n --------\n >>> from rdkit.Chem import MolFromSmiles\n >>> from skfp.descriptors import number_of_rotatable_bonds\n >>> mol = MolFromSmiles(\"C=CC=C\") # Butadiene\n >>> number_of_rotatable_bonds(mol)\n 1\n \"\"\"", "url": "https://github.com/scikit-fingerprints/scikit-fingerprints/blob/bf4faa208aa115873b138f20543800679bc59da2/skfp/descriptors/constitutional.py#L193-L212", "sha": "bf4faa208aa115873b138f20543800679bc59da2", "code/function": "def 
number_of_rotatable_bonds(mol: Mol) -> int:\n \n return CalcNumRotatableBonds(mol)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_reg_loss", "code": "def _reg_loss(self, regr, gt_regr, mask):\n \"\"\"L1 regression loss\n Arguments:\n regr (batch x max_objects x dim)\n gt_regr (batch x max_objects x dim)\n mask (batch x max_objects)\n \"\"\"\n num = mask.float().sum()\n mask = mask.unsqueeze(2).expand_as(gt_regr).clone().float()\n isnotnan = (~torch.isnan(gt_regr)).float()\n mask *= isnotnan\n regr = regr * mask\n gt_regr = gt_regr * mask\n\n loss = torch.abs(regr - gt_regr)\n loss = loss.transpose(2, 0).contiguous()\n\n loss = torch.sum(loss, dim=2)\n loss = torch.sum(loss, dim=1)\n\n num = reduce_mean(num)\n loss = loss / (num + 1e-4)\n return loss", "docstring": "\"\"\"L1 regression loss\n Arguments:\n regr (batch x max_objects x dim)\n gt_regr (batch x max_objects x dim)\n mask (batch x max_objects)\n \"\"\"", "url": "https://github.com/megvii-research/CVPR2023-UniDistill/blob/32f02b4304cdf435b83b2265f59fdfed2710c3a5/unidistill/layers/losses/det3d.py#L394-L416", "sha": "32f02b4304cdf435b83b2265f59fdfed2710c3a5", "code/function": "def _reg_loss(self, regr, gt_regr, mask):\n \n num = mask.float().sum()\n mask = mask.unsqueeze(2).expand_as(gt_regr).clone().float()\n isnotnan = (~torch.isnan(gt_regr)).float()\n mask *= isnotnan\n regr = regr * mask\n gt_regr = gt_regr * mask\n\n loss = torch.abs(regr - gt_regr)\n loss = loss.transpose(2, 0).contiguous()\n\n loss = torch.sum(loss, dim=2)\n loss = torch.sum(loss, dim=1)\n\n num = reduce_mean(num)\n loss = loss / (num + 1e-4)\n return loss"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "run_train", "code": "def run_train(\n cfg: DefaultConfigProblemBase,\n model: torch.nn.Module,\n optimizer,\n scheduler,\n epoch_steps,\n train_dataloader,\n val_dataloader,\n val_df: pd.DataFrame,\n):\n \"\"\"Runs the training loop.\n\n Args:\n cfg: DefaultConfigProblemBase config object\n model: model\n train_dataloader: custom training Dataloader\n train_df: train DataFrame\n val_dataloader: custom validation Dataloader\n val_df: validation DataFrame\n\n Returns:\n Validation prediction output\n Validation loss\n Validation metric\n Last train batch\n \"\"\"\n if (\n hasattr(cfg.augmentation, \"neftune_noise_alpha\")\n and cfg.augmentation.neftune_noise_alpha > 0\n ):\n activate_neftune(model, cfg.augmentation.neftune_noise_alpha)\n\n scaler: GradScaler | None = None\n if cfg.environment.mixed_precision:\n scaler = GradScaler(\n enabled=(cfg.environment.mixed_precision_dtype == \"float16\")\n )\n\n optimizer.zero_grad(set_to_none=True)\n\n # Prepare NLP Augmentation\n nlp_augment = None\n if hasattr(cfg.augmentation, \"nlp_augmentations_class\"):\n nlp_augment = cfg.augmentation.nlp_augmentations_class(cfg=cfg)\n\n start_epoch = 0\n\n _, metric_mode, _ = cfg.prediction.metric_class.get(cfg.prediction.metric)\n objective_op: Callable[[float, float], bool]\n if metric_mode == \"max\":\n best_val_metric = -np.inf\n objective_op = np.greater\n else:\n best_val_metric = np.inf\n objective_op = np.less\n\n if cfg.training.evaluate_before_training:\n val_loss, val_metric = run_eval(\n cfg=cfg, model=model, val_dataloader=val_dataloader, val_df=val_df\n )\n\n for epoch in range(start_epoch, cfg.training.epochs):\n set_seed(\n cfg.environment._seed\n + epoch * cfg.environment._world_size * cfg.environment.number_of_workers\n + cfg.environment._local_rank * 
cfg.environment.number_of_workers\n )\n logger.info(f\"Training Epoch: {epoch + 1} / {cfg.training.epochs}\")\n\n if (\n cfg.environment._distributed\n and not cfg.environment.use_deepspeed\n and hasattr(train_dataloader.sampler, \"set_epoch\")\n ):\n train_dataloader.sampler.set_epoch(epoch) # type: ignore\n\n tqdm_out = TqdmToLogger(logger, level=logging.INFO)\n progress_bar = tqdm(\n total=epoch_steps,\n disable=cfg.environment._local_rank != 0,\n file=tqdm_out,\n ascii=True,\n desc=\"train loss\",\n mininterval=0,\n )\n tr_it = iter(train_dataloader)\n\n losses = []\n model.train()\n\n log_update_steps = max(epoch_steps // 20, 1)\n evaluation_step = max(int(epoch_steps * cfg.training.evaluation_epochs), 1)\n logger.info(f\"Evaluation step: {evaluation_step}\")\n\n for itr, data in enumerate(tr_it):\n cfg.environment._curr_step += (\n cfg.training.batch_size * cfg.environment._world_size\n )\n\n # Batch to device\n batch = cfg.dataset.dataset_class.batch_to_device(\n data, cfg.environment._device\n )\n\n # NLP augmentation\n if nlp_augment is not None:\n batch = nlp_augment(batch)\n\n # Plot first batch\n if epoch == 0 and itr == 0 and cfg.environment._local_rank == 0:\n plot = cfg.logging.plots_class.plot_batch(batch=batch, cfg=cfg)\n log_plot(cfg, plot, \"train_data\")\n\n # only need to sync gradients at last step of grad accumulation\n model.require_backward_grad_sync = itr % cfg.training.grad_accumulation == 0\n\n # Forward pass\n with autocast(\n enabled=cfg.environment.mixed_precision,\n dtype=get_torch_dtype(cfg.environment.mixed_precision_dtype),\n ):\n output_dict = model.forward(batch)\n\n loss = output_dict[\"loss\"]\n if ~np.isfinite(loss.item()) and (epoch > start_epoch or itr > 20):\n raise LLMTrainingException(\n \"NaN caught in loss during training. \"\n \"Please, reduce learning rate, change dtype, \"\n \"or disable mixed precision. 
Alternatively, \"\n \"gradient clipping may help to stabilize training.\"\n )\n losses.append(loss.item())\n\n # loss is a mean loss per batch/sample\n # as grad_accumulations sums up the gradients, this loss must be scaled\n # by the number of grad_accumulations, to have similar behavior for\n # BS * grad_accumulations = const.\n if cfg.training.grad_accumulation != 1:\n loss = loss / cfg.training.grad_accumulation\n\n # Backward pass\n if (\n cfg.environment.mixed_precision\n and len(cfg.environment.gpus)\n and not cfg.environment.use_deepspeed\n ):\n scaler.scale(loss).backward() # type: ignore\n if itr % cfg.training.grad_accumulation == 0:\n if cfg.training.gradient_clip > 0:\n scaler.unscale_(optimizer) # type: ignore\n torch.nn.utils.clip_grad_norm_(\n model.parameters(), cfg.training.gradient_clip\n )\n scaler.step(optimizer) # type: ignore\n scaler.update()\n optimizer.zero_grad(set_to_none=True)\n else:\n if cfg.environment.use_deepspeed:\n model.backward(loss) # type: ignore[operator]\n else:\n loss.backward()\n if itr % cfg.training.grad_accumulation == 0:\n if cfg.training.gradient_clip > 0:\n torch.nn.utils.clip_grad_norm_(\n model.parameters(), cfg.training.gradient_clip\n )\n optimizer.step()\n optimizer.zero_grad(set_to_none=True)\n\n if cfg.environment._distributed:\n torch.cuda.synchronize(device=cfg.environment._local_rank)\n\n if scheduler is not None:\n scheduler.step()\n\n if cfg.environment._local_rank == 0:\n cfg.logging._logger.log(\n \"train\",\n \"loss\",\n losses[-1],\n step=cfg.environment._curr_step\n / cfg.environment._step_log_denominator,\n )\n cfg.logging._logger.log(\n \"meta\",\n \"lr\",\n optimizer.param_groups[0][\"lr\"],\n step=cfg.environment._curr_step\n / cfg.environment._step_log_denominator,\n )\n if cfg.training.differential_learning_rate_layers:\n cfg.logging._logger.log(\n \"meta\",\n \"lr_diff\",\n optimizer.param_groups[2][\"lr\"],\n step=cfg.environment._curr_step\n / cfg.environment._step_log_denominator,\n )\n\n cfg.logging._logger.log(\n \"internal\",\n \"current_step\",\n cfg.environment._curr_step,\n )\n for key in output_dict:\n if key.startswith(\"additional_log_\"):\n cfg.logging._logger.log(\n \"train\",\n key.replace(\"additional_log_\", \"\"),\n output_dict[key].item(),\n step=cfg.environment._curr_step\n / cfg.environment._step_log_denominator,\n )\n\n # Show logs each 5% of the epoch (only if doing per epoch evaluation)\n if (itr + 1) % log_update_steps == 0 or itr == epoch_steps - 1:\n progress_bar.set_description(\n f\"train loss: {np.mean(losses[-10:]):.2f}\", refresh=False\n )\n if (itr + 1) % log_update_steps == 0:\n progress_bar.update(log_update_steps)\n else:\n progress_bar.update(epoch_steps % log_update_steps)\n\n del output_dict\n\n # Validation loop\n if (itr + 1) % evaluation_step == 0:\n # TODO: Move back after fixing slow generation of deepspeed.\n if cfg.training.save_checkpoint == \"last\":\n logger.info(\n f\"Saving last model checkpoint to {cfg.output_directory}\"\n )\n save_checkpoint(model=model, path=cfg.output_directory, cfg=cfg)\n elif cfg.training.save_checkpoint == \"each_evaluation_epoch\":\n checkpoint_path = os.path.join(\n cfg.output_directory, f\"epoch_{epoch}_step_{itr}\"\n )\n logger.info(f\"Saving model checkpoint to {checkpoint_path}\")\n save_checkpoint(model=model, path=checkpoint_path, cfg=cfg)\n create_symlinks_in_parent_folder(checkpoint_path)\n\n val_loss, val_metric = run_eval(\n cfg=cfg, model=model, val_dataloader=val_dataloader, val_df=val_df\n )\n\n if cfg.training.save_checkpoint 
== \"best\":\n if objective_op(val_metric, best_val_metric):\n logger.info(\n f\"Saving best model checkpoint: \"\n f\"val_{cfg.prediction.metric} {best_val_metric:.5} -> \"\n f\"{val_metric:.5} to {cfg.output_directory}\"\n )\n save_checkpoint(model=model, path=cfg.output_directory, cfg=cfg)\n best_val_metric = val_metric\n\n model.train()\n\n progress_bar.close()\n del progress_bar\n\n if cfg.environment._distributed:\n torch.cuda.synchronize(device=cfg.environment._local_rank)\n torch.distributed.barrier()\n\n if cfg.environment._local_rank == 0:\n cfg.logging._logger.log(\"internal\", \"epoch\", epoch + 1)\n\n if cfg.environment._distributed:\n torch.distributed.barrier()\n\n return val_loss, val_metric", "docstring": "\"\"\"Runs the training loop.\n\n Args:\n cfg: DefaultConfigProblemBase config object\n model: model\n train_dataloader: custom training Dataloader\n train_df: train DataFrame\n val_dataloader: custom validation Dataloader\n val_df: validation DataFrame\n\n Returns:\n Validation prediction output\n Validation loss\n Validation metric\n Last train batch\n \"\"\"", "url": "https://github.com/h2oai/h2o-llmstudio/blob/39f5709ff6ad6db08282df0648352b6a88cb749d/llm_studio/train.py#L165-L436", "sha": "39f5709ff6ad6db08282df0648352b6a88cb749d", "code/function": "def run_train(\n cfg: DefaultConfigProblemBase,\n model: torch.nn.Module,\n optimizer,\n scheduler,\n epoch_steps,\n train_dataloader,\n val_dataloader,\n val_df: pd.DataFrame,\n):\n \n if (\n hasattr(cfg.augmentation, \"neftune_noise_alpha\")\n and cfg.augmentation.neftune_noise_alpha > 0\n ):\n activate_neftune(model, cfg.augmentation.neftune_noise_alpha)\n\n scaler: GradScaler | None = None\n if cfg.environment.mixed_precision:\n scaler = GradScaler(\n enabled=(cfg.environment.mixed_precision_dtype == \"float16\")\n )\n\n optimizer.zero_grad(set_to_none=True)\n\n # Prepare NLP Augmentation\n nlp_augment = None\n if hasattr(cfg.augmentation, \"nlp_augmentations_class\"):\n nlp_augment = cfg.augmentation.nlp_augmentations_class(cfg=cfg)\n\n start_epoch = 0\n\n _, metric_mode, _ = cfg.prediction.metric_class.get(cfg.prediction.metric)\n objective_op: Callable[[float, float], bool]\n if metric_mode == \"max\":\n best_val_metric = -np.inf\n objective_op = np.greater\n else:\n best_val_metric = np.inf\n objective_op = np.less\n\n if cfg.training.evaluate_before_training:\n val_loss, val_metric = run_eval(\n cfg=cfg, model=model, val_dataloader=val_dataloader, val_df=val_df\n )\n\n for epoch in range(start_epoch, cfg.training.epochs):\n set_seed(\n cfg.environment._seed\n + epoch * cfg.environment._world_size * cfg.environment.number_of_workers\n + cfg.environment._local_rank * cfg.environment.number_of_workers\n )\n logger.info(f\"Training Epoch: {epoch + 1} / {cfg.training.epochs}\")\n\n if (\n cfg.environment._distributed\n and not cfg.environment.use_deepspeed\n and hasattr(train_dataloader.sampler, \"set_epoch\")\n ):\n train_dataloader.sampler.set_epoch(epoch) # type: ignore\n\n tqdm_out = TqdmToLogger(logger, level=logging.INFO)\n progress_bar = tqdm(\n total=epoch_steps,\n disable=cfg.environment._local_rank != 0,\n file=tqdm_out,\n ascii=True,\n desc=\"train loss\",\n mininterval=0,\n )\n tr_it = iter(train_dataloader)\n\n losses = []\n model.train()\n\n log_update_steps = max(epoch_steps // 20, 1)\n evaluation_step = max(int(epoch_steps * cfg.training.evaluation_epochs), 1)\n logger.info(f\"Evaluation step: {evaluation_step}\")\n\n for itr, data in enumerate(tr_it):\n cfg.environment._curr_step += (\n 
cfg.training.batch_size * cfg.environment._world_size\n )\n\n # Batch to device\n batch = cfg.dataset.dataset_class.batch_to_device(\n data, cfg.environment._device\n )\n\n # NLP augmentation\n if nlp_augment is not None:\n batch = nlp_augment(batch)\n\n # Plot first batch\n if epoch == 0 and itr == 0 and cfg.environment._local_rank == 0:\n plot = cfg.logging.plots_class.plot_batch(batch=batch, cfg=cfg)\n log_plot(cfg, plot, \"train_data\")\n\n # only need to sync gradients at last step of grad accumulation\n model.require_backward_grad_sync = itr % cfg.training.grad_accumulation == 0\n\n # Forward pass\n with autocast(\n enabled=cfg.environment.mixed_precision,\n dtype=get_torch_dtype(cfg.environment.mixed_precision_dtype),\n ):\n output_dict = model.forward(batch)\n\n loss = output_dict[\"loss\"]\n if ~np.isfinite(loss.item()) and (epoch > start_epoch or itr > 20):\n raise LLMTrainingException(\n \"NaN caught in loss during training. \"\n \"Please, reduce learning rate, change dtype, \"\n \"or disable mixed precision. Alternatively, \"\n \"gradient clipping may help to stabilize training.\"\n )\n losses.append(loss.item())\n\n # loss is a mean loss per batch/sample\n # as grad_accumulations sums up the gradients, this loss must be scaled\n # by the number of grad_accumulations, to have similar behavior for\n # BS * grad_accumulations = const.\n if cfg.training.grad_accumulation != 1:\n loss = loss / cfg.training.grad_accumulation\n\n # Backward pass\n if (\n cfg.environment.mixed_precision\n and len(cfg.environment.gpus)\n and not cfg.environment.use_deepspeed\n ):\n scaler.scale(loss).backward() # type: ignore\n if itr % cfg.training.grad_accumulation == 0:\n if cfg.training.gradient_clip > 0:\n scaler.unscale_(optimizer) # type: ignore\n torch.nn.utils.clip_grad_norm_(\n model.parameters(), cfg.training.gradient_clip\n )\n scaler.step(optimizer) # type: ignore\n scaler.update()\n optimizer.zero_grad(set_to_none=True)\n else:\n if cfg.environment.use_deepspeed:\n model.backward(loss) # type: ignore[operator]\n else:\n loss.backward()\n if itr % cfg.training.grad_accumulation == 0:\n if cfg.training.gradient_clip > 0:\n torch.nn.utils.clip_grad_norm_(\n model.parameters(), cfg.training.gradient_clip\n )\n optimizer.step()\n optimizer.zero_grad(set_to_none=True)\n\n if cfg.environment._distributed:\n torch.cuda.synchronize(device=cfg.environment._local_rank)\n\n if scheduler is not None:\n scheduler.step()\n\n if cfg.environment._local_rank == 0:\n cfg.logging._logger.log(\n \"train\",\n \"loss\",\n losses[-1],\n step=cfg.environment._curr_step\n / cfg.environment._step_log_denominator,\n )\n cfg.logging._logger.log(\n \"meta\",\n \"lr\",\n optimizer.param_groups[0][\"lr\"],\n step=cfg.environment._curr_step\n / cfg.environment._step_log_denominator,\n )\n if cfg.training.differential_learning_rate_layers:\n cfg.logging._logger.log(\n \"meta\",\n \"lr_diff\",\n optimizer.param_groups[2][\"lr\"],\n step=cfg.environment._curr_step\n / cfg.environment._step_log_denominator,\n )\n\n cfg.logging._logger.log(\n \"internal\",\n \"current_step\",\n cfg.environment._curr_step,\n )\n for key in output_dict:\n if key.startswith(\"additional_log_\"):\n cfg.logging._logger.log(\n \"train\",\n key.replace(\"additional_log_\", \"\"),\n output_dict[key].item(),\n step=cfg.environment._curr_step\n / cfg.environment._step_log_denominator,\n )\n\n # Show logs each 5% of the epoch (only if doing per epoch evaluation)\n if (itr + 1) % log_update_steps == 0 or itr == epoch_steps - 1:\n 
progress_bar.set_description(\n f\"train loss: {np.mean(losses[-10:]):.2f}\", refresh=False\n )\n if (itr + 1) % log_update_steps == 0:\n progress_bar.update(log_update_steps)\n else:\n progress_bar.update(epoch_steps % log_update_steps)\n\n del output_dict\n\n # Validation loop\n if (itr + 1) % evaluation_step == 0:\n # TODO: Move back after fixing slow generation of deepspeed.\n if cfg.training.save_checkpoint == \"last\":\n logger.info(\n f\"Saving last model checkpoint to {cfg.output_directory}\"\n )\n save_checkpoint(model=model, path=cfg.output_directory, cfg=cfg)\n elif cfg.training.save_checkpoint == \"each_evaluation_epoch\":\n checkpoint_path = os.path.join(\n cfg.output_directory, f\"epoch_{epoch}_step_{itr}\"\n )\n logger.info(f\"Saving model checkpoint to {checkpoint_path}\")\n save_checkpoint(model=model, path=checkpoint_path, cfg=cfg)\n create_symlinks_in_parent_folder(checkpoint_path)\n\n val_loss, val_metric = run_eval(\n cfg=cfg, model=model, val_dataloader=val_dataloader, val_df=val_df\n )\n\n if cfg.training.save_checkpoint == \"best\":\n if objective_op(val_metric, best_val_metric):\n logger.info(\n f\"Saving best model checkpoint: \"\n f\"val_{cfg.prediction.metric} {best_val_metric:.5} -> \"\n f\"{val_metric:.5} to {cfg.output_directory}\"\n )\n save_checkpoint(model=model, path=cfg.output_directory, cfg=cfg)\n best_val_metric = val_metric\n\n model.train()\n\n progress_bar.close()\n del progress_bar\n\n if cfg.environment._distributed:\n torch.cuda.synchronize(device=cfg.environment._local_rank)\n torch.distributed.barrier()\n\n if cfg.environment._local_rank == 0:\n cfg.logging._logger.log(\"internal\", \"epoch\", epoch + 1)\n\n if cfg.environment._distributed:\n torch.distributed.barrier()\n\n return val_loss, val_metric"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "create_custom_chat_agent", "code": "def create_custom_chat_agent(\n name: str, llm_config: OpenAIGPTConfig, system_message: str\n) -> ChatAgent:\n \"\"\"creates a ChatAgent with the given parameters.\n\n Args:\n name (str): The name of the agent.\n llm_config (OpenAIGPTConfig): The LLM configuration for the agent.\n system_message (str): The system message to guide the agent's LLM.\n\n Returns:\n ChatAgent: A configured ChatAgent instance.\n \"\"\"\n # Modify the system message to include instructions for the agent\n additional_system_message = \"\"\"**Response format (strictly follow this structure):** \n Pro: \n - [First key point] \n - [Second key point] \n - [Third key point]\n **Limit responses to exactly 3 points expressed as single sentences.**\"\n \"\"\"\n system_message = f\"\"\"\n Start your response with '{name}: ' and then follow the instructions below.\n {system_message} {additional_system_message}\n \"\"\"\n return ChatAgent(\n ChatAgentConfig(\n llm=llm_config,\n name=name,\n system_message=system_message,\n )\n )", "docstring": "\"\"\"creates a ChatAgent with the given parameters.\n\n Args:\n name (str): The name of the agent.\n llm_config (OpenAIGPTConfig): The LLM configuration for the agent.\n system_message (str): The system message to guide the agent's LLM.\n\n Returns:\n ChatAgent: A configured ChatAgent instance.\n \"\"\"", "url": "https://github.com/langroid/langroid/blob/8eecdf99b42a0cb488522cc950e0890e3e9b72ed/examples/multi-agent-debate/main_chainlit.py#L89-L120", "sha": "8eecdf99b42a0cb488522cc950e0890e3e9b72ed", "code/function": "def create_custom_chat_agent(\n name: str, llm_config: OpenAIGPTConfig, system_message: str\n) -> 
ChatAgent:\n \n # Modify the system message to include instructions for the agent\n additional_system_message = \"\"\"**Response format (strictly follow this structure):** \n Pro: \n - [First key point] \n - [Second key point] \n - [Third key point]\n **Limit responses to exactly 3 points expressed as single sentences.**\"\n \"\"\"\n system_message = f\"\"\"\n Start your response with '{name}: ' and then follow the instructions below.\n {system_message} {additional_system_message}\n \"\"\"\n return ChatAgent(\n ChatAgentConfig(\n llm=llm_config,\n name=name,\n system_message=system_message,\n )\n )"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "clear_all_collections", "code": "def clear_all_collections(self, really: bool = False, prefix: str = \"\") -> int:\n \"\"\"Clear all collections with the given prefix.\"\"\"\n\n if not really:\n logger.warning(\"Not deleting all collections, set really=True to confirm\")\n return 0\n coll_names = self.list_collections(empty=False)\n coll_names = [name for name in coll_names if name.startswith(prefix)]\n if len(coll_names) == 0:\n logger.warning(f\"No collections found with prefix {prefix}\")\n return 0\n for name in coll_names:\n self.delete_collection(name)\n logger.warning(\n f\"\"\"\n Deleted {len(coll_names)} indices from Momento VI\n \"\"\"\n )\n return len(coll_names)", "docstring": "\"\"\"Clear all collections with the given prefix.\"\"\"", "url": "https://github.com/langroid/langroid/blob/8eecdf99b42a0cb488522cc950e0890e3e9b72ed/langroid/vector_store/momento.py#L99-L117", "sha": "8eecdf99b42a0cb488522cc950e0890e3e9b72ed", "code/function": "def clear_all_collections(self, really: bool = False, prefix: str = \"\") -> int:\n \n\n if not really:\n logger.warning(\"Not deleting all collections, set really=True to confirm\")\n return 0\n coll_names = self.list_collections(empty=False)\n coll_names = [name for name in coll_names if name.startswith(prefix)]\n if len(coll_names) == 0:\n logger.warning(f\"No collections found with prefix {prefix}\")\n return 0\n for name in coll_names:\n self.delete_collection(name)\n logger.warning(\n f\"\"\"\n Deleted {len(coll_names)} indices from Momento VI\n \"\"\"\n )\n return len(coll_names)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "T5EncoderDecoderInitHelper.verify_onnx", "code": "@staticmethod\n def verify_onnx(\n model: T5EncoderDecoderInit,\n ort_session: InferenceSession,\n device: torch.device,\n max_cases=4,\n ):\n \"\"\"Compare the result from PyTorch and OnnxRuntime to verify the ONNX model is good.\"\"\"\n ort_inputs = ort_session.get_inputs()\n use_decoder_input_ids = len(ort_inputs) == 3\n\n test_cases = [(4, 11), (1, 2), (3, 1), (8, 5)]\n test_cases_max_diff = []\n for (batch_size, encode_sequence_length) in test_cases[:max_cases]:\n inputs = T5EncoderDecoderInitInputs.create_dummy(\n model.config,\n batch_size,\n encode_sequence_length,\n use_decoder_input_ids=use_decoder_input_ids,\n device=device,\n )\n\n ort_outputs = T5EncoderDecoderInitHelper.onnxruntime_inference(ort_session, inputs)\n\n # Run inference of PyTorch model\n input_list = inputs.to_list()\n torch_outputs = model(*input_list)\n\n assert torch_outputs[0].cpu().numpy().shape == ort_outputs['logits'].shape\n max_diff = numpy.amax(numpy.abs(torch_outputs[0].cpu().numpy() - ort_outputs['logits'].cpu().numpy()))\n logger.debug(f\"logits max_diff={max_diff}\")\n max_diff_all = max_diff\n\n assert torch_outputs[1].cpu().numpy().shape == 
ort_outputs['encoder_hidden_states'].shape\n max_diff = numpy.amax(\n numpy.abs(torch_outputs[1].cpu().numpy() - ort_outputs['encoder_hidden_states'].cpu().numpy()))\n logger.debug(f\"encoder_hidden_states max_diff={max_diff}\")\n max_diff_all = max(max_diff_all, max_diff)\n\n for i in range(2 * model.config.num_layers):\n if i % 2 == 0:\n ort_outputs_i = ort_outputs[f'present_key_self_{i//2}']\n else:\n ort_outputs_i = ort_outputs[f'present_value_self_{i//2}']\n max_diff = numpy.amax(numpy.abs(torch_outputs[2][i].cpu().numpy() - ort_outputs_i.cpu().numpy()))\n logger.debug(f\"self attention past state {i} max_diff={max_diff}\")\n\n for i in range(2 * model.config.num_layers):\n if i % 2 == 0:\n ort_outputs_i = ort_outputs[f'present_key_cross_{i//2}']\n else:\n ort_outputs_i = ort_outputs[f'present_value_cross_{i//2}']\n max_diff = numpy.amax(\n numpy.abs(torch_outputs[3][i].cpu().numpy() - ort_outputs_i.cpu().numpy())\n )\n logger.debug(f\"cross attention past state {i} max_diff={max_diff}\")\n max_diff_all = max(max_diff_all, max_diff)\n\n test_cases_max_diff.append(max_diff_all)\n logger.info(\n f\"batch_size={batch_size} encode_sequence_length={encode_sequence_length}, max_diff={max_diff_all}\"\n )\n\n return max(test_cases_max_diff)", "docstring": "\"\"\"Compare the result from PyTorch and OnnxRuntime to verify the ONNX model is good.\"\"\"", "url": "https://github.com/X-PLUG/ChatPLUG/blob/3f2b8608f59e443214a22d123faaa5930fb3b783/XDPX/xdpx/utils/thirdparty/onnx_transformers/models/t5/t5_encoder_decoder_init.py#L219-L282", "sha": "3f2b8608f59e443214a22d123faaa5930fb3b783", "code/function": "@staticmethod\n def verify_onnx(\n model: T5EncoderDecoderInit,\n ort_session: InferenceSession,\n device: torch.device,\n max_cases=4,\n ):\n \n ort_inputs = ort_session.get_inputs()\n use_decoder_input_ids = len(ort_inputs) == 3\n\n test_cases = [(4, 11), (1, 2), (3, 1), (8, 5)]\n test_cases_max_diff = []\n for (batch_size, encode_sequence_length) in test_cases[:max_cases]:\n inputs = T5EncoderDecoderInitInputs.create_dummy(\n model.config,\n batch_size,\n encode_sequence_length,\n use_decoder_input_ids=use_decoder_input_ids,\n device=device,\n )\n\n ort_outputs = T5EncoderDecoderInitHelper.onnxruntime_inference(ort_session, inputs)\n\n # Run inference of PyTorch model\n input_list = inputs.to_list()\n torch_outputs = model(*input_list)\n\n assert torch_outputs[0].cpu().numpy().shape == ort_outputs['logits'].shape\n max_diff = numpy.amax(numpy.abs(torch_outputs[0].cpu().numpy() - ort_outputs['logits'].cpu().numpy()))\n logger.debug(f\"logits max_diff={max_diff}\")\n max_diff_all = max_diff\n\n assert torch_outputs[1].cpu().numpy().shape == ort_outputs['encoder_hidden_states'].shape\n max_diff = numpy.amax(\n numpy.abs(torch_outputs[1].cpu().numpy() - ort_outputs['encoder_hidden_states'].cpu().numpy()))\n logger.debug(f\"encoder_hidden_states max_diff={max_diff}\")\n max_diff_all = max(max_diff_all, max_diff)\n\n for i in range(2 * model.config.num_layers):\n if i % 2 == 0:\n ort_outputs_i = ort_outputs[f'present_key_self_{i//2}']\n else:\n ort_outputs_i = ort_outputs[f'present_value_self_{i//2}']\n max_diff = numpy.amax(numpy.abs(torch_outputs[2][i].cpu().numpy() - ort_outputs_i.cpu().numpy()))\n logger.debug(f\"self attention past state {i} max_diff={max_diff}\")\n\n for i in range(2 * model.config.num_layers):\n if i % 2 == 0:\n ort_outputs_i = ort_outputs[f'present_key_cross_{i//2}']\n else:\n ort_outputs_i = ort_outputs[f'present_value_cross_{i//2}']\n max_diff = numpy.amax(\n 
numpy.abs(torch_outputs[3][i].cpu().numpy() - ort_outputs_i.cpu().numpy())\n )\n logger.debug(f\"cross attention past state {i} max_diff={max_diff}\")\n max_diff_all = max(max_diff_all, max_diff)\n\n test_cases_max_diff.append(max_diff_all)\n logger.info(\n f\"batch_size={batch_size} encode_sequence_length={encode_sequence_length}, max_diff={max_diff_all}\"\n )\n\n return max(test_cases_max_diff)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "forward", "code": "def forward(self, x):\n \"\"\"Forward pass.\n\n Args:\n x (tensor): input\n\n Returns:\n tensor: interpolated data\n \"\"\"\n\n x = self.interp(\n x, scale_factor=self.scale_factor, mode=self.mode, align_corners=self.align_corners\n )\n\n return x", "docstring": "\"\"\"Forward pass.\n\n Args:\n x (tensor): input\n\n Returns:\n tensor: interpolated data\n \"\"\"", "url": "https://github.com/wolverinn/stable-diffusion-multi-user/blob/1d79ad90de9c75692bd8e49d57679697dbefd393/extensions/sd-webui-controlnet/annotator/zoe/zoedepth/models/base_models/midas_repo/midas/blocks.py#L226-L240", "sha": "1d79ad90de9c75692bd8e49d57679697dbefd393", "code/function": "def forward(self, x):\n \n\n x = self.interp(\n x, scale_factor=self.scale_factor, mode=self.mode, align_corners=self.align_corners\n )\n\n return x"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UnCLIPPipeline.__call__", "code": "@torch.no_grad()\n def __call__(\n self,\n prompt: Optional[Union[str, List[str]]] = None,\n num_images_per_prompt: int = 1,\n prior_num_inference_steps: int = 25,\n decoder_num_inference_steps: int = 25,\n super_res_num_inference_steps: int = 7,\n generator: Optional[torch.Generator] = None,\n prior_latents: Optional[torch.FloatTensor] = None,\n decoder_latents: Optional[torch.FloatTensor] = None,\n super_res_latents: Optional[torch.FloatTensor] = None,\n text_model_output: Optional[Union[CLIPTextModelOutput, Tuple]] = None,\n text_attention_mask: Optional[torch.Tensor] = None,\n prior_guidance_scale: float = 4.0,\n decoder_guidance_scale: float = 8.0,\n output_type: Optional[str] = \"pil\",\n return_dict: bool = True,\n ):\n \"\"\"\n Function invoked when calling the pipeline for generation.\n Args:\n prompt (`str` or `List[str]`):\n The prompt or prompts to guide the image generation. This can only be left undefined if\n `text_model_output` and `text_attention_mask` is passed.\n num_images_per_prompt (`int`, *optional*, defaults to 1):\n The number of images to generate per prompt.\n prior_num_inference_steps (`int`, *optional*, defaults to 25):\n The number of denoising steps for the prior. More denoising steps usually lead to a higher quality\n image at the expense of slower inference.\n decoder_num_inference_steps (`int`, *optional*, defaults to 25):\n The number of denoising steps for the decoder. More denoising steps usually lead to a higher quality\n image at the expense of slower inference.\n super_res_num_inference_steps (`int`, *optional*, defaults to 7):\n The number of denoising steps for super resolution. 
More denoising steps usually lead to a higher\n quality image at the expense of slower inference.\n generator (`torch.Generator`, *optional*):\n One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n to make generation deterministic.\n prior_latents (`torch.FloatTensor` of shape (batch size, embeddings dimension), *optional*):\n Pre-generated noisy latents to be used as inputs for the prior.\n decoder_latents (`torch.FloatTensor` of shape (batch size, channels, height, width), *optional*):\n Pre-generated noisy latents to be used as inputs for the decoder.\n super_res_latents (`torch.FloatTensor` of shape (batch size, channels, super res height, super res width), *optional*):\n Pre-generated noisy latents to be used as inputs for the decoder.\n prior_guidance_scale (`float`, *optional*, defaults to 4.0):\n Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n `guidance_scale` is defined as `w` of equation 2. of [Imagen\n Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n usually at the expense of lower image quality.\n decoder_guidance_scale (`float`, *optional*, defaults to 4.0):\n Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n `guidance_scale` is defined as `w` of equation 2. of [Imagen\n Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n usually at the expense of lower image quality.\n text_model_output (`CLIPTextModelOutput`, *optional*):\n Pre-defined CLIPTextModel outputs that can be derived from the text encoder. Pre-defined text outputs\n can be passed for tasks like text embedding interpolations. Make sure to also pass\n `text_attention_mask` in this case. `prompt` can the be left to `None`.\n text_attention_mask (`torch.Tensor`, *optional*):\n Pre-defined CLIP text attention mask that can be derived from the tokenizer. Pre-defined text attention\n masks are necessary when passing `text_model_output`.\n output_type (`str`, *optional*, defaults to `\"pil\"`):\n The output format of the generated image. 
Choose between\n [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n return_dict (`bool`, *optional*, defaults to `True`):\n Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n \"\"\"\n if prompt is not None:\n if isinstance(prompt, str):\n batch_size = 1\n elif isinstance(prompt, list):\n batch_size = len(prompt)\n else:\n raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n else:\n batch_size = text_model_output[0].shape[0]\n\n device = self._execution_device\n\n batch_size = batch_size * num_images_per_prompt\n\n do_classifier_free_guidance = prior_guidance_scale > 1.0 or decoder_guidance_scale > 1.0\n\n text_embeddings, text_encoder_hidden_states, text_mask = self._encode_prompt(\n prompt, device, num_images_per_prompt, do_classifier_free_guidance, text_model_output, text_attention_mask\n )\n\n # prior\n\n self.prior_scheduler.set_timesteps(prior_num_inference_steps, device=device)\n prior_timesteps_tensor = self.prior_scheduler.timesteps\n\n embedding_dim = self.prior.config.embedding_dim\n\n prior_latents = self.prepare_latents(\n (batch_size, embedding_dim),\n text_embeddings.dtype,\n device,\n generator,\n prior_latents,\n self.prior_scheduler,\n )\n\n for i, t in enumerate(self.progress_bar(prior_timesteps_tensor)):\n # expand the latents if we are doing classifier free guidance\n latent_model_input = torch.cat([prior_latents] * 2) if do_classifier_free_guidance else prior_latents\n\n predicted_image_embedding = self.prior(\n latent_model_input,\n timestep=t,\n proj_embedding=text_embeddings,\n encoder_hidden_states=text_encoder_hidden_states,\n attention_mask=text_mask,\n ).predicted_image_embedding\n\n if do_classifier_free_guidance:\n predicted_image_embedding_uncond, predicted_image_embedding_text = predicted_image_embedding.chunk(2)\n predicted_image_embedding = predicted_image_embedding_uncond + prior_guidance_scale * (\n predicted_image_embedding_text - predicted_image_embedding_uncond\n )\n\n if i + 1 == prior_timesteps_tensor.shape[0]:\n prev_timestep = None\n else:\n prev_timestep = prior_timesteps_tensor[i + 1]\n\n prior_latents = self.prior_scheduler.step(\n predicted_image_embedding,\n timestep=t,\n sample=prior_latents,\n generator=generator,\n prev_timestep=prev_timestep,\n ).prev_sample\n\n prior_latents = self.prior.post_process_latents(prior_latents)\n\n image_embeddings = prior_latents\n\n # done prior\n\n # decoder\n\n text_encoder_hidden_states, additive_clip_time_embeddings = self.text_proj(\n image_embeddings=image_embeddings,\n text_embeddings=text_embeddings,\n text_encoder_hidden_states=text_encoder_hidden_states,\n do_classifier_free_guidance=do_classifier_free_guidance,\n )\n\n decoder_text_mask = F.pad(text_mask, (self.text_proj.clip_extra_context_tokens, 0), value=1)\n\n self.decoder_scheduler.set_timesteps(decoder_num_inference_steps, device=device)\n decoder_timesteps_tensor = self.decoder_scheduler.timesteps\n\n num_channels_latents = self.decoder.in_channels\n height = self.decoder.sample_size\n width = self.decoder.sample_size\n\n decoder_latents = self.prepare_latents(\n (batch_size, num_channels_latents, height, width),\n text_encoder_hidden_states.dtype,\n device,\n generator,\n decoder_latents,\n self.decoder_scheduler,\n )\n\n for i, t in enumerate(self.progress_bar(decoder_timesteps_tensor)):\n # expand the latents if we are doing classifier free guidance\n latent_model_input = torch.cat([decoder_latents] * 2) if 
do_classifier_free_guidance else decoder_latents\n\n noise_pred = self.decoder(\n sample=latent_model_input,\n timestep=t,\n encoder_hidden_states=text_encoder_hidden_states,\n class_labels=additive_clip_time_embeddings,\n attention_mask=decoder_text_mask,\n ).sample\n\n if do_classifier_free_guidance:\n noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n noise_pred_uncond, _ = noise_pred_uncond.split(latent_model_input.shape[1], dim=1)\n noise_pred_text, predicted_variance = noise_pred_text.split(latent_model_input.shape[1], dim=1)\n noise_pred = noise_pred_uncond + decoder_guidance_scale * (noise_pred_text - noise_pred_uncond)\n noise_pred = torch.cat([noise_pred, predicted_variance], dim=1)\n\n if i + 1 == decoder_timesteps_tensor.shape[0]:\n prev_timestep = None\n else:\n prev_timestep = decoder_timesteps_tensor[i + 1]\n\n # compute the previous noisy sample x_t -> x_t-1\n decoder_latents = self.decoder_scheduler.step(\n noise_pred, t, decoder_latents, prev_timestep=prev_timestep, generator=generator\n ).prev_sample\n\n decoder_latents = decoder_latents.clamp(-1, 1)\n\n image_small = decoder_latents\n\n # done decoder\n\n # super res\n\n self.super_res_scheduler.set_timesteps(super_res_num_inference_steps, device=device)\n super_res_timesteps_tensor = self.super_res_scheduler.timesteps\n\n channels = self.super_res_first.in_channels // 2\n height = self.super_res_first.sample_size\n width = self.super_res_first.sample_size\n\n super_res_latents = self.prepare_latents(\n (batch_size, channels, height, width),\n image_small.dtype,\n device,\n generator,\n super_res_latents,\n self.super_res_scheduler,\n )\n\n interpolate_antialias = {}\n if \"antialias\" in inspect.signature(F.interpolate).parameters:\n interpolate_antialias[\"antialias\"] = True\n\n image_upscaled = F.interpolate(\n image_small, size=[height, width], mode=\"bicubic\", align_corners=False, **interpolate_antialias\n )\n\n for i, t in enumerate(self.progress_bar(super_res_timesteps_tensor)):\n # no classifier free guidance\n\n if i == super_res_timesteps_tensor.shape[0] - 1:\n unet = self.super_res_last\n else:\n unet = self.super_res_first\n\n latent_model_input = torch.cat([super_res_latents, image_upscaled], dim=1)\n\n noise_pred = unet(\n sample=latent_model_input,\n timestep=t,\n ).sample\n\n if i + 1 == super_res_timesteps_tensor.shape[0]:\n prev_timestep = None\n else:\n prev_timestep = super_res_timesteps_tensor[i + 1]\n\n # compute the previous noisy sample x_t -> x_t-1\n super_res_latents = self.super_res_scheduler.step(\n noise_pred, t, super_res_latents, prev_timestep=prev_timestep, generator=generator\n ).prev_sample\n\n image = super_res_latents\n # done super res\n\n # post processing\n\n image = image * 0.5 + 0.5\n image = image.clamp(0, 1)\n image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n if output_type == \"pil\":\n image = self.numpy_to_pil(image)\n\n if not return_dict:\n return (image,)\n\n return ImagePipelineOutput(images=image)", "docstring": "\"\"\"\n Function invoked when calling the pipeline for generation.\n Args:\n prompt (`str` or `List[str]`):\n The prompt or prompts to guide the image generation. This can only be left undefined if\n `text_model_output` and `text_attention_mask` is passed.\n num_images_per_prompt (`int`, *optional*, defaults to 1):\n The number of images to generate per prompt.\n prior_num_inference_steps (`int`, *optional*, defaults to 25):\n The number of denoising steps for the prior. 
More denoising steps usually lead to a higher quality\n image at the expense of slower inference.\n decoder_num_inference_steps (`int`, *optional*, defaults to 25):\n The number of denoising steps for the decoder. More denoising steps usually lead to a higher quality\n image at the expense of slower inference.\n super_res_num_inference_steps (`int`, *optional*, defaults to 7):\n The number of denoising steps for super resolution. More denoising steps usually lead to a higher\n quality image at the expense of slower inference.\n generator (`torch.Generator`, *optional*):\n One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n to make generation deterministic.\n prior_latents (`torch.FloatTensor` of shape (batch size, embeddings dimension), *optional*):\n Pre-generated noisy latents to be used as inputs for the prior.\n decoder_latents (`torch.FloatTensor` of shape (batch size, channels, height, width), *optional*):\n Pre-generated noisy latents to be used as inputs for the decoder.\n super_res_latents (`torch.FloatTensor` of shape (batch size, channels, super res height, super res width), *optional*):\n Pre-generated noisy latents to be used as inputs for the decoder.\n prior_guidance_scale (`float`, *optional*, defaults to 4.0):\n Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n `guidance_scale` is defined as `w` of equation 2. of [Imagen\n Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n usually at the expense of lower image quality.\n decoder_guidance_scale (`float`, *optional*, defaults to 4.0):\n Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n `guidance_scale` is defined as `w` of equation 2. of [Imagen\n Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n 1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n usually at the expense of lower image quality.\n text_model_output (`CLIPTextModelOutput`, *optional*):\n Pre-defined CLIPTextModel outputs that can be derived from the text encoder. Pre-defined text outputs\n can be passed for tasks like text embedding interpolations. Make sure to also pass\n `text_attention_mask` in this case. `prompt` can the be left to `None`.\n text_attention_mask (`torch.Tensor`, *optional*):\n Pre-defined CLIP text attention mask that can be derived from the tokenizer. Pre-defined text attention\n masks are necessary when passing `text_model_output`.\n output_type (`str`, *optional*, defaults to `\"pil\"`):\n The output format of the generated image. 
Choose between\n [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n return_dict (`bool`, *optional*, defaults to `True`):\n Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n \"\"\"", "url": "https://github.com/wolverinn/stable-diffusion-multi-user/blob/1d79ad90de9c75692bd8e49d57679697dbefd393/repositories/stable-diffusion-stability-ai/ldm/modules/karlo/diffusers_pipeline.py#L241-L512", "sha": "1d79ad90de9c75692bd8e49d57679697dbefd393", "code/function": "@torch.no_grad()\n def __call__(\n self,\n prompt: Optional[Union[str, List[str]]] = None,\n num_images_per_prompt: int = 1,\n prior_num_inference_steps: int = 25,\n decoder_num_inference_steps: int = 25,\n super_res_num_inference_steps: int = 7,\n generator: Optional[torch.Generator] = None,\n prior_latents: Optional[torch.FloatTensor] = None,\n decoder_latents: Optional[torch.FloatTensor] = None,\n super_res_latents: Optional[torch.FloatTensor] = None,\n text_model_output: Optional[Union[CLIPTextModelOutput, Tuple]] = None,\n text_attention_mask: Optional[torch.Tensor] = None,\n prior_guidance_scale: float = 4.0,\n decoder_guidance_scale: float = 8.0,\n output_type: Optional[str] = \"pil\",\n return_dict: bool = True,\n ):\n \n if prompt is not None:\n if isinstance(prompt, str):\n batch_size = 1\n elif isinstance(prompt, list):\n batch_size = len(prompt)\n else:\n raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n else:\n batch_size = text_model_output[0].shape[0]\n\n device = self._execution_device\n\n batch_size = batch_size * num_images_per_prompt\n\n do_classifier_free_guidance = prior_guidance_scale > 1.0 or decoder_guidance_scale > 1.0\n\n text_embeddings, text_encoder_hidden_states, text_mask = self._encode_prompt(\n prompt, device, num_images_per_prompt, do_classifier_free_guidance, text_model_output, text_attention_mask\n )\n\n # prior\n\n self.prior_scheduler.set_timesteps(prior_num_inference_steps, device=device)\n prior_timesteps_tensor = self.prior_scheduler.timesteps\n\n embedding_dim = self.prior.config.embedding_dim\n\n prior_latents = self.prepare_latents(\n (batch_size, embedding_dim),\n text_embeddings.dtype,\n device,\n generator,\n prior_latents,\n self.prior_scheduler,\n )\n\n for i, t in enumerate(self.progress_bar(prior_timesteps_tensor)):\n # expand the latents if we are doing classifier free guidance\n latent_model_input = torch.cat([prior_latents] * 2) if do_classifier_free_guidance else prior_latents\n\n predicted_image_embedding = self.prior(\n latent_model_input,\n timestep=t,\n proj_embedding=text_embeddings,\n encoder_hidden_states=text_encoder_hidden_states,\n attention_mask=text_mask,\n ).predicted_image_embedding\n\n if do_classifier_free_guidance:\n predicted_image_embedding_uncond, predicted_image_embedding_text = predicted_image_embedding.chunk(2)\n predicted_image_embedding = predicted_image_embedding_uncond + prior_guidance_scale * (\n predicted_image_embedding_text - predicted_image_embedding_uncond\n )\n\n if i + 1 == prior_timesteps_tensor.shape[0]:\n prev_timestep = None\n else:\n prev_timestep = prior_timesteps_tensor[i + 1]\n\n prior_latents = self.prior_scheduler.step(\n predicted_image_embedding,\n timestep=t,\n sample=prior_latents,\n generator=generator,\n prev_timestep=prev_timestep,\n ).prev_sample\n\n prior_latents = self.prior.post_process_latents(prior_latents)\n\n image_embeddings = prior_latents\n\n # done prior\n\n # decoder\n\n text_encoder_hidden_states, 
additive_clip_time_embeddings = self.text_proj(\n image_embeddings=image_embeddings,\n text_embeddings=text_embeddings,\n text_encoder_hidden_states=text_encoder_hidden_states,\n do_classifier_free_guidance=do_classifier_free_guidance,\n )\n\n decoder_text_mask = F.pad(text_mask, (self.text_proj.clip_extra_context_tokens, 0), value=1)\n\n self.decoder_scheduler.set_timesteps(decoder_num_inference_steps, device=device)\n decoder_timesteps_tensor = self.decoder_scheduler.timesteps\n\n num_channels_latents = self.decoder.in_channels\n height = self.decoder.sample_size\n width = self.decoder.sample_size\n\n decoder_latents = self.prepare_latents(\n (batch_size, num_channels_latents, height, width),\n text_encoder_hidden_states.dtype,\n device,\n generator,\n decoder_latents,\n self.decoder_scheduler,\n )\n\n for i, t in enumerate(self.progress_bar(decoder_timesteps_tensor)):\n # expand the latents if we are doing classifier free guidance\n latent_model_input = torch.cat([decoder_latents] * 2) if do_classifier_free_guidance else decoder_latents\n\n noise_pred = self.decoder(\n sample=latent_model_input,\n timestep=t,\n encoder_hidden_states=text_encoder_hidden_states,\n class_labels=additive_clip_time_embeddings,\n attention_mask=decoder_text_mask,\n ).sample\n\n if do_classifier_free_guidance:\n noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n noise_pred_uncond, _ = noise_pred_uncond.split(latent_model_input.shape[1], dim=1)\n noise_pred_text, predicted_variance = noise_pred_text.split(latent_model_input.shape[1], dim=1)\n noise_pred = noise_pred_uncond + decoder_guidance_scale * (noise_pred_text - noise_pred_uncond)\n noise_pred = torch.cat([noise_pred, predicted_variance], dim=1)\n\n if i + 1 == decoder_timesteps_tensor.shape[0]:\n prev_timestep = None\n else:\n prev_timestep = decoder_timesteps_tensor[i + 1]\n\n # compute the previous noisy sample x_t -> x_t-1\n decoder_latents = self.decoder_scheduler.step(\n noise_pred, t, decoder_latents, prev_timestep=prev_timestep, generator=generator\n ).prev_sample\n\n decoder_latents = decoder_latents.clamp(-1, 1)\n\n image_small = decoder_latents\n\n # done decoder\n\n # super res\n\n self.super_res_scheduler.set_timesteps(super_res_num_inference_steps, device=device)\n super_res_timesteps_tensor = self.super_res_scheduler.timesteps\n\n channels = self.super_res_first.in_channels // 2\n height = self.super_res_first.sample_size\n width = self.super_res_first.sample_size\n\n super_res_latents = self.prepare_latents(\n (batch_size, channels, height, width),\n image_small.dtype,\n device,\n generator,\n super_res_latents,\n self.super_res_scheduler,\n )\n\n interpolate_antialias = {}\n if \"antialias\" in inspect.signature(F.interpolate).parameters:\n interpolate_antialias[\"antialias\"] = True\n\n image_upscaled = F.interpolate(\n image_small, size=[height, width], mode=\"bicubic\", align_corners=False, **interpolate_antialias\n )\n\n for i, t in enumerate(self.progress_bar(super_res_timesteps_tensor)):\n # no classifier free guidance\n\n if i == super_res_timesteps_tensor.shape[0] - 1:\n unet = self.super_res_last\n else:\n unet = self.super_res_first\n\n latent_model_input = torch.cat([super_res_latents, image_upscaled], dim=1)\n\n noise_pred = unet(\n sample=latent_model_input,\n timestep=t,\n ).sample\n\n if i + 1 == super_res_timesteps_tensor.shape[0]:\n prev_timestep = None\n else:\n prev_timestep = super_res_timesteps_tensor[i + 1]\n\n # compute the previous noisy sample x_t -> x_t-1\n super_res_latents = 
self.super_res_scheduler.step(\n noise_pred, t, super_res_latents, prev_timestep=prev_timestep, generator=generator\n ).prev_sample\n\n image = super_res_latents\n # done super res\n\n # post processing\n\n image = image * 0.5 + 0.5\n image = image.clamp(0, 1)\n image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n if output_type == \"pil\":\n image = self.numpy_to_pil(image)\n\n if not return_dict:\n return (image,)\n\n return ImagePipelineOutput(images=image)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "r1_penalty", "code": "def r1_penalty(real_pred, real_img):\n \"\"\"R1 regularization for discriminator. The core idea is to\n penalize the gradient on real data alone: when the\n generator distribution produces the true data distribution\n and the discriminator is equal to 0 on the data manifold, the\n gradient penalty ensures that the discriminator cannot create\n a non-zero gradient orthogonal to the data manifold without\n suffering a loss in the GAN game.\n\n Ref:\n Eq. 9 in Which training methods for GANs do actually converge.\n \"\"\"\n grad_real = autograd.grad(outputs=real_pred.sum(), inputs=real_img, create_graph=True)[0]\n grad_penalty = grad_real.pow(2).view(grad_real.shape[0], -1).sum(1).mean()\n return grad_penalty", "docstring": "\"\"\"R1 regularization for discriminator. The core idea is to\n penalize the gradient on real data alone: when the\n generator distribution produces the true data distribution\n and the discriminator is equal to 0 on the data manifold, the\n gradient penalty ensures that the discriminator cannot create\n a non-zero gradient orthogonal to the data manifold without\n suffering a loss in the GAN game.\n\n Ref:\n Eq. 9 in Which training methods for GANs do actually converge.\n \"\"\"", "url": "https://github.com/wolverinn/stable-diffusion-multi-user/blob/1d79ad90de9c75692bd8e49d57679697dbefd393/sd-docker-slim/repositories/CodeFormer/basicsr/losses/losses.py#L390-L404", "sha": "1d79ad90de9c75692bd8e49d57679697dbefd393", "code/function": "def r1_penalty(real_pred, real_img):\n \n grad_real = autograd.grad(outputs=real_pred.sum(), inputs=real_img, create_graph=True)[0]\n grad_penalty = grad_real.pow(2).view(grad_real.shape[0], -1).sum(1).mean()\n return grad_penalty"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ModelSpec.name", "code": "@property\n def name(self) -> str:\n \"\"\"Return the model name (for backwards compatibility).\"\"\"\n return self.id", "docstring": "\"\"\"Return the model name (for backwards compatibility).\"\"\"", "url": "https://github.com/autonomi-ai/nos/blob/2761f7b50fa3173c74ec63a3321527fbb980b9ac/nos/common/spec.py#L449-L452", "sha": "2761f7b50fa3173c74ec63a3321527fbb980b9ac", "code/function": "@property\n def name(self) -> str:\n \n return self.id"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "overlay_instances", "code": "def overlay_instances(\n self,\n *,\n boxes=None,\n labels=None,\n masks=None,\n keypoints=None,\n assigned_colors=None,\n alpha=0.5,\n ):\n \"\"\"\n Args:\n boxes (Boxes, RotatedBoxes or ndarray): either a :class:`Boxes`,\n or an Nx4 numpy array of XYXY_ABS format for the N objects in a single image,\n or a :class:`RotatedBoxes`,\n or an Nx5 numpy array of (x_center, y_center, width, height, angle_degrees) format\n for the N objects in a single image,\n labels (list[str]): the text to be displayed for each instance.\n masks (masks-like object): Supported types are:\n\n * 
:class:`detectron2.structures.PolygonMasks`,\n :class:`detectron2.structures.BitMasks`.\n * list[list[ndarray]]: contains the segmentation masks for all objects in one image.\n The first level of the list corresponds to individual instances. The second\n level to all the polygon that compose the instance, and the third level\n to the polygon coordinates. The third level should have the format of\n [x0, y0, x1, y1, ..., xn, yn] (n >= 3).\n * list[ndarray]: each ndarray is a binary mask of shape (H, W).\n * list[dict]: each dict is a COCO-style RLE.\n keypoints (Keypoint or array like): an array-like object of shape (N, K, 3),\n where the N is the number of instances and K is the number of keypoints.\n The last dimension corresponds to (x, y, visibility or score).\n assigned_colors (list[matplotlib.colors]): a list of colors, where each color\n corresponds to each mask or box in the image. Refer to 'matplotlib.colors'\n for full list of formats that the colors are accepted in.\n Returns:\n output (VisImage): image object with visualizations.\n \"\"\"\n num_instances = 0\n if boxes is not None:\n boxes = self._convert_boxes(boxes)\n num_instances = len(boxes)\n if masks is not None:\n masks = self._convert_masks(masks)\n if num_instances:\n assert len(masks) == num_instances\n else:\n num_instances = len(masks)\n if keypoints is not None:\n if num_instances:\n assert len(keypoints) == num_instances\n else:\n num_instances = len(keypoints)\n keypoints = self._convert_keypoints(keypoints)\n if labels is not None:\n assert len(labels) == num_instances\n if assigned_colors is None:\n assigned_colors = [random_color(rgb=True, maximum=1) for _ in range(num_instances)]\n if num_instances == 0:\n return self.output\n if boxes is not None and boxes.shape[1] == 5:\n return self.overlay_rotated_instances(\n boxes=boxes, labels=labels, assigned_colors=assigned_colors\n )\n\n # Display in largest to smallest order to reduce occlusion.\n areas = None\n if boxes is not None:\n areas = np.prod(boxes[:, 2:] - boxes[:, :2], axis=1)\n elif masks is not None:\n areas = np.asarray([x.area() for x in masks])\n\n if areas is not None:\n sorted_idxs = np.argsort(-areas).tolist()\n # Re-order overlapped instances in descending order.\n boxes = boxes[sorted_idxs] if boxes is not None else None\n labels = [labels[k] for k in sorted_idxs] if labels is not None else None\n masks = [masks[idx] for idx in sorted_idxs] if masks is not None else None\n assigned_colors = [assigned_colors[idx] for idx in sorted_idxs]\n keypoints = keypoints[sorted_idxs] if keypoints is not None else None\n\n for i in range(num_instances):\n color = assigned_colors[i]\n if boxes is not None:\n self.draw_box(boxes[i], edge_color=color)\n\n if masks is not None:\n for segment in masks[i].polygons:\n self.draw_polygon(segment.reshape(-1, 2), color, alpha=alpha)\n\n if labels is not None:\n # first get a box\n if boxes is not None:\n x0, y0, x1, y1 = boxes[i]\n text_pos = (x0, y0) # if drawing boxes, put text on the box corner.\n horiz_align = \"left\"\n elif masks is not None:\n # skip small mask without polygon\n if len(masks[i].polygons) == 0:\n continue\n\n x0, y0, x1, y1 = masks[i].bbox()\n\n # draw text in the center (defined by median) when box is not drawn\n # median is less sensitive to outliers.\n text_pos = np.median(masks[i].mask.nonzero(), axis=1)[::-1]\n horiz_align = \"center\"\n else:\n continue # drawing the box confidence for keypoints isn't very useful.\n # for small objects, draw text at the side to avoid occlusion\n 
instance_area = (y1 - y0) * (x1 - x0)\n if (\n instance_area < _SMALL_OBJECT_AREA_THRESH * self.output.scale\n or y1 - y0 < 40 * self.output.scale\n ):\n if y1 >= self.output.height - 5:\n text_pos = (x1, y0)\n else:\n text_pos = (x0, y1)\n\n height_ratio = (y1 - y0) / np.sqrt(self.output.height * self.output.width)\n lighter_color = self._change_color_brightness(color, brightness_factor=0.7)\n font_size = (\n np.clip((height_ratio - 0.02) / 0.08 + 1, 1.2, 2)\n * 0.5\n * self._default_font_size\n )\n self.draw_text(\n labels[i],\n text_pos,\n color=lighter_color,\n horizontal_alignment=horiz_align,\n font_size=font_size,\n )\n\n # draw keypoints\n if keypoints is not None:\n for keypoints_per_instance in keypoints:\n self.draw_and_connect_keypoints(keypoints_per_instance)\n\n return self.output", "docstring": "\"\"\"\n Args:\n boxes (Boxes, RotatedBoxes or ndarray): either a :class:`Boxes`,\n or an Nx4 numpy array of XYXY_ABS format for the N objects in a single image,\n or a :class:`RotatedBoxes`,\n or an Nx5 numpy array of (x_center, y_center, width, height, angle_degrees) format\n for the N objects in a single image,\n labels (list[str]): the text to be displayed for each instance.\n masks (masks-like object): Supported types are:\n\n * :class:`detectron2.structures.PolygonMasks`,\n :class:`detectron2.structures.BitMasks`.\n * list[list[ndarray]]: contains the segmentation masks for all objects in one image.\n The first level of the list corresponds to individual instances. The second\n level to all the polygon that compose the instance, and the third level\n to the polygon coordinates. The third level should have the format of\n [x0, y0, x1, y1, ..., xn, yn] (n >= 3).\n * list[ndarray]: each ndarray is a binary mask of shape (H, W).\n * list[dict]: each dict is a COCO-style RLE.\n keypoints (Keypoint or array like): an array-like object of shape (N, K, 3),\n where the N is the number of instances and K is the number of keypoints.\n The last dimension corresponds to (x, y, visibility or score).\n assigned_colors (list[matplotlib.colors]): a list of colors, where each color\n corresponds to each mask or box in the image. 
Refer to 'matplotlib.colors'\n for full list of formats that the colors are accepted in.\n Returns:\n output (VisImage): image object with visualizations.\n \"\"\"", "url": "https://github.com/OpenGVLab/Ask-Anything/blob/c7f879b10533ba7d030c04ac559374663e35e3a4/video_chat_text/video_chat_with_ChatGPT/models/grit_src/third_party/CenterNet2/detectron2/utils/visualizer.py#L607-L747", "sha": "c7f879b10533ba7d030c04ac559374663e35e3a4", "code/function": "def overlay_instances(\n self,\n *,\n boxes=None,\n labels=None,\n masks=None,\n keypoints=None,\n assigned_colors=None,\n alpha=0.5,\n ):\n \n num_instances = 0\n if boxes is not None:\n boxes = self._convert_boxes(boxes)\n num_instances = len(boxes)\n if masks is not None:\n masks = self._convert_masks(masks)\n if num_instances:\n assert len(masks) == num_instances\n else:\n num_instances = len(masks)\n if keypoints is not None:\n if num_instances:\n assert len(keypoints) == num_instances\n else:\n num_instances = len(keypoints)\n keypoints = self._convert_keypoints(keypoints)\n if labels is not None:\n assert len(labels) == num_instances\n if assigned_colors is None:\n assigned_colors = [random_color(rgb=True, maximum=1) for _ in range(num_instances)]\n if num_instances == 0:\n return self.output\n if boxes is not None and boxes.shape[1] == 5:\n return self.overlay_rotated_instances(\n boxes=boxes, labels=labels, assigned_colors=assigned_colors\n )\n\n # Display in largest to smallest order to reduce occlusion.\n areas = None\n if boxes is not None:\n areas = np.prod(boxes[:, 2:] - boxes[:, :2], axis=1)\n elif masks is not None:\n areas = np.asarray([x.area() for x in masks])\n\n if areas is not None:\n sorted_idxs = np.argsort(-areas).tolist()\n # Re-order overlapped instances in descending order.\n boxes = boxes[sorted_idxs] if boxes is not None else None\n labels = [labels[k] for k in sorted_idxs] if labels is not None else None\n masks = [masks[idx] for idx in sorted_idxs] if masks is not None else None\n assigned_colors = [assigned_colors[idx] for idx in sorted_idxs]\n keypoints = keypoints[sorted_idxs] if keypoints is not None else None\n\n for i in range(num_instances):\n color = assigned_colors[i]\n if boxes is not None:\n self.draw_box(boxes[i], edge_color=color)\n\n if masks is not None:\n for segment in masks[i].polygons:\n self.draw_polygon(segment.reshape(-1, 2), color, alpha=alpha)\n\n if labels is not None:\n # first get a box\n if boxes is not None:\n x0, y0, x1, y1 = boxes[i]\n text_pos = (x0, y0) # if drawing boxes, put text on the box corner.\n horiz_align = \"left\"\n elif masks is not None:\n # skip small mask without polygon\n if len(masks[i].polygons) == 0:\n continue\n\n x0, y0, x1, y1 = masks[i].bbox()\n\n # draw text in the center (defined by median) when box is not drawn\n # median is less sensitive to outliers.\n text_pos = np.median(masks[i].mask.nonzero(), axis=1)[::-1]\n horiz_align = \"center\"\n else:\n continue # drawing the box confidence for keypoints isn't very useful.\n # for small objects, draw text at the side to avoid occlusion\n instance_area = (y1 - y0) * (x1 - x0)\n if (\n instance_area < _SMALL_OBJECT_AREA_THRESH * self.output.scale\n or y1 - y0 < 40 * self.output.scale\n ):\n if y1 >= self.output.height - 5:\n text_pos = (x1, y0)\n else:\n text_pos = (x0, y1)\n\n height_ratio = (y1 - y0) / np.sqrt(self.output.height * self.output.width)\n lighter_color = self._change_color_brightness(color, brightness_factor=0.7)\n font_size = (\n np.clip((height_ratio - 0.02) / 0.08 + 1, 1.2, 2)\n * 0.5\n * 
self._default_font_size\n )\n self.draw_text(\n labels[i],\n text_pos,\n color=lighter_color,\n horizontal_alignment=horiz_align,\n font_size=font_size,\n )\n\n # draw keypoints\n if keypoints is not None:\n for keypoints_per_instance in keypoints:\n self.draw_and_connect_keypoints(keypoints_per_instance)\n\n return self.output"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "predict_proposals", "code": "def predict_proposals(\n self,\n anchors: List[Boxes],\n pred_objectness_logits: List[torch.Tensor],\n pred_anchor_deltas: List[torch.Tensor],\n image_sizes: List[Tuple[int, int]],\n ):\n \"\"\"\n Decode all the predicted box regression deltas to proposals. Find the top proposals\n by applying NMS and removing boxes that are too small.\n\n Returns:\n proposals (list[Instances]): list of N Instances. The i-th Instances\n stores post_nms_topk object proposals for image i, sorted by their\n objectness score in descending order.\n \"\"\"\n # The proposals are treated as fixed for joint training with roi heads.\n # This approach ignores the derivative w.r.t. the proposal boxes’ coordinates that\n # are also network responses.\n with torch.no_grad():\n pred_proposals = self._decode_proposals(anchors, pred_anchor_deltas)\n return find_top_rpn_proposals(\n pred_proposals,\n pred_objectness_logits,\n image_sizes,\n self.nms_thresh,\n self.pre_nms_topk[self.training],\n self.post_nms_topk[self.training],\n self.min_box_size,\n self.training,\n )", "docstring": "\"\"\"\n Decode all the predicted box regression deltas to proposals. Find the top proposals\n by applying NMS and removing boxes that are too small.\n\n Returns:\n proposals (list[Instances]): list of N Instances. The i-th Instances\n stores post_nms_topk object proposals for image i, sorted by their\n objectness score in descending order.\n \"\"\"", "url": "https://github.com/OpenGVLab/Ask-Anything/blob/c7f879b10533ba7d030c04ac559374663e35e3a4/video_chat_text/video_chat_with_StableLM/models/grit_src/third_party/CenterNet2/detectron2/modeling/proposal_generator/rpn.py#L482-L512", "sha": "c7f879b10533ba7d030c04ac559374663e35e3a4", "code/function": "def predict_proposals(\n self,\n anchors: List[Boxes],\n pred_objectness_logits: List[torch.Tensor],\n pred_anchor_deltas: List[torch.Tensor],\n image_sizes: List[Tuple[int, int]],\n ):\n \n # The proposals are treated as fixed for joint training with roi heads.\n # This approach ignores the derivative w.r.t. the proposal boxes’ coordinates that\n # are also network responses.\n with torch.no_grad():\n pred_proposals = self._decode_proposals(anchors, pred_anchor_deltas)\n return find_top_rpn_proposals(\n pred_proposals,\n pred_objectness_logits,\n image_sizes,\n self.nms_thresh,\n self.pre_nms_topk[self.training],\n self.post_nms_topk[self.training],\n self.min_box_size,\n self.training,\n )"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__init__", "code": "def __init__(self, tensor: torch.Tensor, image_sizes: List[Tuple[int, int]]):\n \"\"\"\n Arguments:\n tensor (Tensor): of shape (N, H, W) or (N, C_1, ..., C_K, H, W) where K >= 1\n image_sizes (list[tuple[int, int]]): Each tuple is (h, w). It can\n be smaller than (H, W) due to padding.\n \"\"\"\n self.tensor = tensor\n self.image_sizes = image_sizes", "docstring": "\"\"\"\n Arguments:\n tensor (Tensor): of shape (N, H, W) or (N, C_1, ..., C_K, H, W) where K >= 1\n image_sizes (list[tuple[int, int]]): Each tuple is (h, w). 
It can\n be smaller than (H, W) due to padding.\n \"\"\"", "url": "https://github.com/OpenGVLab/Ask-Anything/blob/c7f879b10533ba7d030c04ac559374663e35e3a4/video_chat_text/video_chat_with_StableLM/models/grit_src/third_party/CenterNet2/detectron2/structures/image_list.py#L23-L31", "sha": "c7f879b10533ba7d030c04ac559374663e35e3a4", "code/function": "def __init__(self, tensor: torch.Tensor, image_sizes: List[Tuple[int, int]]):\n \n self.tensor = tensor\n self.image_sizes = image_sizes"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "swish", "code": "def swish(x, inplace: bool = False):\n \"\"\"Swish - Described in: https://arxiv.org/abs/1710.05941\n \"\"\"\n return x.mul_(x.sigmoid()) if inplace else x.mul(x.sigmoid())", "docstring": "\"\"\"Swish - Described in: https://arxiv.org/abs/1710.05941\n \"\"\"", "url": "https://github.com/OpenGVLab/Ask-Anything/blob/c7f879b10533ba7d030c04ac559374663e35e3a4/video_chat_text/video_chat_with_StableLM/models/grit_src/third_party/CenterNet2/projects/CenterNet2/centernet/modeling/backbone/bifpn.py#L40-L43", "sha": "c7f879b10533ba7d030c04ac559374663e35e3a4", "code/function": "def swish(x, inplace: bool = False):\n \n return x.mul_(x.sigmoid()) if inplace else x.mul(x.sigmoid())"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "voc_eval", "code": "def voc_eval(detpath, annopath, imagesetfile, classname, ovthresh=0.5, use_07_metric=False):\n \"\"\"rec, prec, ap = voc_eval(detpath,\n annopath,\n imagesetfile,\n classname,\n [ovthresh],\n [use_07_metric])\n\n Top level function that does the PASCAL VOC evaluation.\n\n detpath: Path to detections\n detpath.format(classname) should produce the detection results file.\n annopath: Path to annotations\n annopath.format(imagename) should be the xml annotations file.\n imagesetfile: Text file containing the list of images, one image per line.\n classname: Category name (duh)\n [ovthresh]: Overlap threshold (default = 0.5)\n [use_07_metric]: Whether to use VOC07's 11 point AP computation\n (default False)\n \"\"\"\n # assumes detections are in detpath.format(classname)\n # assumes annotations are in annopath.format(imagename)\n # assumes imagesetfile is a text file with each line an image name\n\n # first load gt\n # read list of images\n with PathManager.open(imagesetfile, \"r\") as f:\n lines = f.readlines()\n imagenames = [x.strip() for x in lines]\n\n # load annots\n recs = {}\n for imagename in imagenames:\n recs[imagename] = parse_rec(annopath.format(imagename))\n\n # extract gt objects for this class\n class_recs = {}\n npos = 0\n for imagename in imagenames:\n R = [obj for obj in recs[imagename] if obj[\"name\"] == classname]\n bbox = np.array([x[\"bbox\"] for x in R])\n difficult = np.array([x[\"difficult\"] for x in R]).astype(np.bool)\n # difficult = np.array([False for x in R]).astype(np.bool) # treat all \"difficult\" as GT\n det = [False] * len(R)\n npos = npos + sum(~difficult)\n class_recs[imagename] = {\"bbox\": bbox, \"difficult\": difficult, \"det\": det}\n\n # read dets\n detfile = detpath.format(classname)\n with open(detfile, \"r\") as f:\n lines = f.readlines()\n\n splitlines = [x.strip().split(\" \") for x in lines]\n image_ids = [x[0] for x in splitlines]\n confidence = np.array([float(x[1]) for x in splitlines])\n BB = np.array([[float(z) for z in x[2:]] for x in splitlines]).reshape(-1, 4)\n\n # sort by confidence\n sorted_ind = np.argsort(-confidence)\n BB = BB[sorted_ind, :]\n image_ids = [image_ids[x] for x in 
sorted_ind]\n\n # go down dets and mark TPs and FPs\n nd = len(image_ids)\n tp = np.zeros(nd)\n fp = np.zeros(nd)\n for d in range(nd):\n R = class_recs[image_ids[d]]\n bb = BB[d, :].astype(float)\n ovmax = -np.inf\n BBGT = R[\"bbox\"].astype(float)\n\n if BBGT.size > 0:\n # compute overlaps\n # intersection\n ixmin = np.maximum(BBGT[:, 0], bb[0])\n iymin = np.maximum(BBGT[:, 1], bb[1])\n ixmax = np.minimum(BBGT[:, 2], bb[2])\n iymax = np.minimum(BBGT[:, 3], bb[3])\n iw = np.maximum(ixmax - ixmin + 1.0, 0.0)\n ih = np.maximum(iymax - iymin + 1.0, 0.0)\n inters = iw * ih\n\n # union\n uni = (\n (bb[2] - bb[0] + 1.0) * (bb[3] - bb[1] + 1.0)\n + (BBGT[:, 2] - BBGT[:, 0] + 1.0) * (BBGT[:, 3] - BBGT[:, 1] + 1.0)\n - inters\n )\n\n overlaps = inters / uni\n ovmax = np.max(overlaps)\n jmax = np.argmax(overlaps)\n\n if ovmax > ovthresh:\n if not R[\"difficult\"][jmax]:\n if not R[\"det\"][jmax]:\n tp[d] = 1.0\n R[\"det\"][jmax] = 1\n else:\n fp[d] = 1.0\n else:\n fp[d] = 1.0\n\n # compute precision recall\n fp = np.cumsum(fp)\n tp = np.cumsum(tp)\n rec = tp / float(npos)\n # avoid divide by zero in case the first detection matches a difficult\n # ground truth\n prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)\n ap = voc_ap(rec, prec, use_07_metric)\n\n return rec, prec, ap", "docstring": "\"\"\"rec, prec, ap = voc_eval(detpath,\n annopath,\n imagesetfile,\n classname,\n [ovthresh],\n [use_07_metric])\n\n Top level function that does the PASCAL VOC evaluation.\n\n detpath: Path to detections\n detpath.format(classname) should produce the detection results file.\n annopath: Path to annotations\n annopath.format(imagename) should be the xml annotations file.\n imagesetfile: Text file containing the list of images, one image per line.\n classname: Category name (duh)\n [ovthresh]: Overlap threshold (default = 0.5)\n [use_07_metric]: Whether to use VOC07's 11 point AP computation\n (default False)\n \"\"\"", "url": "https://github.com/OpenGVLab/Ask-Anything/blob/c7f879b10533ba7d030c04ac559374663e35e3a4/video_chat_with_MOSS/models/grit_src/third_party/CenterNet2/detectron2/evaluation/pascal_voc_evaluation.py#L187-L300", "sha": "c7f879b10533ba7d030c04ac559374663e35e3a4", "code/function": "def voc_eval(detpath, annopath, imagesetfile, classname, ovthresh=0.5, use_07_metric=False):\n \n # assumes detections are in detpath.format(classname)\n # assumes annotations are in annopath.format(imagename)\n # assumes imagesetfile is a text file with each line an image name\n\n # first load gt\n # read list of images\n with PathManager.open(imagesetfile, \"r\") as f:\n lines = f.readlines()\n imagenames = [x.strip() for x in lines]\n\n # load annots\n recs = {}\n for imagename in imagenames:\n recs[imagename] = parse_rec(annopath.format(imagename))\n\n # extract gt objects for this class\n class_recs = {}\n npos = 0\n for imagename in imagenames:\n R = [obj for obj in recs[imagename] if obj[\"name\"] == classname]\n bbox = np.array([x[\"bbox\"] for x in R])\n difficult = np.array([x[\"difficult\"] for x in R]).astype(np.bool)\n # difficult = np.array([False for x in R]).astype(np.bool) # treat all \"difficult\" as GT\n det = [False] * len(R)\n npos = npos + sum(~difficult)\n class_recs[imagename] = {\"bbox\": bbox, \"difficult\": difficult, \"det\": det}\n\n # read dets\n detfile = detpath.format(classname)\n with open(detfile, \"r\") as f:\n lines = f.readlines()\n\n splitlines = [x.strip().split(\" \") for x in lines]\n image_ids = [x[0] for x in splitlines]\n confidence = np.array([float(x[1]) for x in 
splitlines])\n BB = np.array([[float(z) for z in x[2:]] for x in splitlines]).reshape(-1, 4)\n\n # sort by confidence\n sorted_ind = np.argsort(-confidence)\n BB = BB[sorted_ind, :]\n image_ids = [image_ids[x] for x in sorted_ind]\n\n # go down dets and mark TPs and FPs\n nd = len(image_ids)\n tp = np.zeros(nd)\n fp = np.zeros(nd)\n for d in range(nd):\n R = class_recs[image_ids[d]]\n bb = BB[d, :].astype(float)\n ovmax = -np.inf\n BBGT = R[\"bbox\"].astype(float)\n\n if BBGT.size > 0:\n # compute overlaps\n # intersection\n ixmin = np.maximum(BBGT[:, 0], bb[0])\n iymin = np.maximum(BBGT[:, 1], bb[1])\n ixmax = np.minimum(BBGT[:, 2], bb[2])\n iymax = np.minimum(BBGT[:, 3], bb[3])\n iw = np.maximum(ixmax - ixmin + 1.0, 0.0)\n ih = np.maximum(iymax - iymin + 1.0, 0.0)\n inters = iw * ih\n\n # union\n uni = (\n (bb[2] - bb[0] + 1.0) * (bb[3] - bb[1] + 1.0)\n + (BBGT[:, 2] - BBGT[:, 0] + 1.0) * (BBGT[:, 3] - BBGT[:, 1] + 1.0)\n - inters\n )\n\n overlaps = inters / uni\n ovmax = np.max(overlaps)\n jmax = np.argmax(overlaps)\n\n if ovmax > ovthresh:\n if not R[\"difficult\"][jmax]:\n if not R[\"det\"][jmax]:\n tp[d] = 1.0\n R[\"det\"][jmax] = 1\n else:\n fp[d] = 1.0\n else:\n fp[d] = 1.0\n\n # compute precision recall\n fp = np.cumsum(fp)\n tp = np.cumsum(tp)\n rec = tp / float(npos)\n # avoid divide by zero in case the first detection matches a difficult\n # ground truth\n prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)\n ap = voc_ap(rec, prec, use_07_metric)\n\n return rec, prec, ap"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "apply_image", "code": "def apply_image(self, img, interp=None):\n \"\"\"\n img should be a numpy array, formatted as Height * Width * Nchannels\n \"\"\"\n if len(img) == 0 or self.angle % 360 == 0:\n return img\n assert img.shape[:2] == (self.h, self.w)\n interp = interp if interp is not None else self.interp\n return cv2.warpAffine(img, self.rm_image, (self.bound_w, self.bound_h), flags=interp)", "docstring": "\"\"\"\n img should be a numpy array, formatted as Height * Width * Nchannels\n \"\"\"", "url": "https://github.com/OpenGVLab/Ask-Anything/blob/c7f879b10533ba7d030c04ac559374663e35e3a4/video_chat_with_StableLM/models/grit_src/third_party/CenterNet2/detectron2/data/transforms/transform.py#L200-L208", "sha": "c7f879b10533ba7d030c04ac559374663e35e3a4", "code/function": "def apply_image(self, img, interp=None):\n \n if len(img) == 0 or self.angle % 360 == 0:\n return img\n assert img.shape[:2] == (self.h, self.w)\n interp = interp if interp is not None else self.interp\n return cv2.warpAffine(img, self.rm_image, (self.bound_w, self.bound_h), flags=interp)"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Kamal::Git.untracked_files", "code": "def untracked_files\n `git ls-files --others`.lines.map(&:strip)\n end", "docstring": "# returns an array of relative path names of untracked files, including gitignored files", "url": "https://github.com/basecamp/kamal/blob/6f29d4e78bc29c3392f54f93ea0451ad1ff68b13/lib/kamal/git.rb#L34-L36", "sha": "6f29d4e78bc29c3392f54f93ea0451ad1ff68b13"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Kamal::Utils.escape_shell_value", "code": "def escape_shell_value(value)\n value.to_s.scan(/[\\x00-\\x7F]+|[^\\x00-\\x7F]+/) \\\n .map { |part| part.ascii_only? ? 
escape_ascii_shell_value(part) : part }\n .join\n end", "docstring": "# Escape a value to make it safe for shell use.", "url": "https://github.com/basecamp/kamal/blob/6f29d4e78bc29c3392f54f93ea0451ad1ff68b13/lib/kamal/utils.rb#L60-L64", "sha": "6f29d4e78bc29c3392f54f93ea0451ad1ff68b13"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Kamal::Commands::Docker.installed?", "code": "def installed?\n docker \"-v\"\n end", "docstring": "# Checks the Docker client version. Fails if Docker is not installed.", "url": "https://github.com/basecamp/kamal/blob/6f29d4e78bc29c3392f54f93ea0451ad1ff68b13/lib/kamal/commands/docker.rb#L8-L10", "sha": "6f29d4e78bc29c3392f54f93ea0451ad1ff68b13"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "puma_stop", "code": "def puma_stop\n if inside_rails_conainer?\n puts 'Stopping PUMA'\n pids = `pgrep -f puma`.split(\"\\n\")\n pids.delete(Process.pid.to_s)\n pids.each { |pid| system \"kill -9 #{pid}\" }\n else\n run_rails_command('pkill -f puma')\n end\n end", "docstring": "# Do not use: `pkill -f puma`", "url": "https://github.com/the-teacher/rails7-startkit/blob/51bb127e2114910bdc759f6974740a5c4703a53d/Rails7StartKit/bin/puma.rb#L20-L29", "sha": "51bb127e2114910bdc759f6974740a5c4703a53d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "cache", "code": "def cache\n puts 'Toggle App Cache in development mode'\n cache_toggle_file = 'tmp/caching-dev.txt'\n\n FileUtils.chdir APP_ROOT do\n if File.exist?(cache_toggle_file)\n puts \"File '#{cache_toggle_file}' exists\"\n remove_file(cache_toggle_file)\n puts \"File '#{cache_toggle_file}' is removed\"\n puts 'Cache is OFF'\n else\n puts \"File '#{cache_toggle_file}' does not exist\"\n touch_file(cache_toggle_file)\n puts \"File '#{cache_toggle_file}' is created\"\n puts 'Cache is ON'\n end\n end\n\n # Not sure if for windows it will work.\n # https://stackoverflow.com/questions/11982057/how-can-i-trigger-a-shell-script-and-run-in-background-async-in-ruby\n #\n puts 'Restarting PUMA server'\n puma_restart\n end", "docstring": "# def get_secret_key", "url": "https://github.com/the-teacher/rails7-startkit/blob/51bb127e2114910bdc759f6974740a5c4703a53d/Rails7StartKit/bin/rails7startkit.rb#L57-L80", "sha": "51bb127e2114910bdc759f6974740a5c4703a53d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Devise.Api.Responses.ErrorResponse.error_description", "code": "def error_description\n return [I18n.t(\"devise.api.error_response.#{error}\")] if record.blank?\n if invalid_authentication_error? && devise_lockable_info.present? && record.access_locked?\n return [I18n.t('devise.api.error_response.lockable.locked')]\n end\n if invalid_authentication_error? && devise_confirmable_info.present? 
&& !record.confirmed?\n return [I18n.t('devise.api.error_response.confirmable.unconfirmed')]\n end\n return [I18n.t('devise.api.error_response.invalid_authentication')] if invalid_authentication_error?\n\n record.errors.full_messages\n end", "docstring": "# rubocop:disable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity", "url": "https://github.com/nejdetkadir/devise-api/blob/bd49310c4e96ec6e56ec38c2542754718b2f1c57/lib/devise/api/responses/error_response.rb#L59-L70", "sha": "bd49310c4e96ec6e56ec38c2542754718b2f1c57"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ReviseAuth.Authentication.login", "code": "def login(user)\n user_return_to = session[:user_return_to]\n reset_session\n Current.user = user\n session[:user_id] = user.id\n session[:user_return_to] = user_return_to\n end", "docstring": "# - Save a session cookie so the next request is authenticated", "url": "https://github.com/excid3/revise_auth/blob/6af392c5138f18c18205acfc404d355bda17d80c/lib/revise_auth/authentication.rb#L60-L66", "sha": "6af392c5138f18c18205acfc404d355bda17d80c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ReviseAuth.Authentication.revise_auth_controller?", "code": "def revise_auth_controller?\n is_a?(::ReviseAuthController)\n end", "docstring": "# before_action :authenticate_user!, unless: :revise_auth_controller?", "url": "https://github.com/excid3/revise_auth/blob/6af392c5138f18c18205acfc404d355bda17d80c/lib/revise_auth/authentication.rb#L94-L96", "sha": "6af392c5138f18c18205acfc404d355bda17d80c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SolidQueue::Processes.Interruptible.interruptible_sleep", "code": "def interruptible_sleep(time)\n # Invoking this from the main thread may result in significant slowdown.\n # Utilizing asynchronous execution (Futures) addresses this performance issue.\n Concurrent::Promises.future(time) do |timeout|\n queue.clear unless queue.pop(timeout:).nil?\n end.on_rejection! do |e|\n wrapped_exception = RuntimeError.new(\"Interruptible#interruptible_sleep - #{e.class}: #{e.message}\")\n wrapped_exception.set_backtrace(e.backtrace)\n handle_thread_error(wrapped_exception)\n end.value\n\n nil\n end", "docstring": "# @param time [Numeric, Duration] the time to sleep. 
0 returns immediately.", "url": "https://github.com/rails/solid_queue/blob/9cd6bc3a16826d04c63ba667c93bd8cbb094db81/lib/solid_queue/processes/interruptible.rb#L19-L31", "sha": "9cd6bc3a16826d04c63ba667c93bd8cbb094db81"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ActiveSupport::TestCase.silence_on_thread_error_for", "code": "def silence_on_thread_error_for(expected, &block)\n current_proc = SolidQueue.on_thread_error\n\n SolidQueue.with(on_thread_error: silent_on_thread_error_for(expected, current_proc)) do\n block.call\n end\n end", "docstring": "# @yield Executes the provided block with specified exception(s) silenced", "url": "https://github.com/rails/solid_queue/blob/9cd6bc3a16826d04c63ba667c93bd8cbb094db81/test/test_helper.rb#L85-L91", "sha": "9cd6bc3a16826d04c63ba667c93bd8cbb094db81"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DockerfileGenerator.api_client_dir", "code": "def api_client_dir\n if api_only?\n scan = \"*/package.json\"\n else\n scan = \"{client,frontend}/package.json\"\n end\n\n file = Dir[scan].find do |file|\n JSON.load_file(file).dig(\"scripts\", \"build\")\n end\n\n file && File.dirname(file)\n end", "docstring": "# for specific directories.", "url": "https://github.com/fly-apps/dockerfile-rails/blob/f48c9f23fc888061dd94f170d37bb6260032dde8/lib/generators/dockerfile_generator.rb#L1250-L1262", "sha": "f48c9f23fc888061dd94f170d37bb6260032dde8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CheckmarxScaHelper.fetch_all_vulns_of_project", "code": "def fetch_all_vulns_of_project(token, scan_id)\n print_good \"\\n\"\n print_good \"Getting All vulnerabilities of ScanId: #{scan_id} \\n\"\n checkmarx_sca_vulnerabilites_api_url = \"https://api-sca.checkmarx.net/risk-management/risk-reports/#{scan_id}/vulnerabilities\"\n headers = {\n \"Content-Type\" => \"application/json\",\n \"accept\" => \"application/json\",\n \"Authorization\" => \"Bearer #{token}\"\n }\n auth_response = http_get(checkmarx_sca_vulnerabilites_api_url, headers)\n return nil unless auth_response\n\n begin\n vulns_results = JSON.parse(auth_response.body)\n print_good \"Scan Results: \\n\"\n print_good vulns_results.to_s\n rescue JSON::ParserError\n print_error \"Unable to process scans response!\"\n end\n vulns_results\n end", "docstring": "# method to fetch all vulns for each project", "url": "https://github.com/aleshevdenis/128iid/blob/1709c456363b4f9aca306328bbad14555735d88b/tasks/connectors/checkmarx_sca/lib/checkmarx_sca_helper.rb#L83-L103", "sha": "1709c456363b4f9aca306328bbad14555735d88b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExpanseIssues.ExpanseIssuesMapper.default_issue_field_mapping", "code": "def default_issue_field_mapping(issue_type)\n {\n \"asset\" => [\n { action: \"copy\", source: \"ip\", target: \"ip_address\" },\n { action: \"proc\",\n target: \"hostname\",\n proc: lambda { |x|\n temp = x[\"domain\"]\n temp = x[\"assets\"].first[\"displayName\"] if temp.nil? 
&& x[\"assets\"].first[\"assetType\"].match?(/Domain/im)\n # temp = temp.gsub(\"\\*\", \"WILDCARD\") unless temp.nil?\n temp\n } },\n { action: \"proc\",\n target: \"tags\",\n proc: lambda { |x|\n temp = [\"Expanse\"] # always tag as 'Expanse'\n\n # Handle new businessUnits (plural) tag\n if x.key?(\"businessUnits\")\n x[\"businessUnits\"].foreach do |bu|\n temp << \"businessUnit:#{bu.fetch('name')}\"\n end\n end\n\n # Annotations are like tags, add foreach one\n # if x.key?(\"annotations\")\n # x[\"annotations\"][\"tags\"].foreach do |at|\n # temp << at.fetch(\"name\")\n # end\n # end\n\n # flatten since we have an array of arrays\n temp.flatten\n } }\n ],\n \"vuln\" => [\n { action: \"proc\", target: \"vuln_def_name\", proc: ->(_x) { issue_type } },\n { action: \"proc\", target: \"scanner_identifier\", proc: ->(x) { x[\"id\"] } },\n { action: \"proc\", target: \"created_at\", proc: ->(x) { x[\"initialEvidence\"][\"timestamp\"] } },\n { action: \"proc\", target: \"last_seen_at\", proc: ->(x) { x[\"latestEvidence\"][\"timestamp\"] } },\n { action: \"proc\", target: \"port\", proc: ->(x) { (x[\"portNumber\"] || x[\"initialEvidence\"][\"portNumber\"]).to_i } },\n { action: \"proc\", target: \"details\", proc: ->(x) { \"Headline: #{x['headline']}\\nHelpText: #{x['helpText']}\\n\\nFull Issue:\\n #{JSON.pretty_generate(x)}\" } },\n { action: \"proc\", target: \"scanner_score\", proc: ->(x) { map_issue_priority(x[\"priority\"]) } },\n { action: \"proc\", target: \"override_score\", proc: ->(x) { map_issue_priority(x[\"priority\"]).to_i * 10 } },\n { action: \"data\", target: \"scanner_type\", data: \"Expanse_issues\" }\n ],\n \"vuln_def\" => [\n { action: \"data\", target: \"scanner_type\", data: \"Expanse_issues\" },\n { action: \"proc\", target: \"name\", proc: ->(_x) { issue_type } },\n { action: \"proc\", target: \"scanner_identifier\", proc: ->(_x) { issue_type } },\n { action: \"proc\", target: \"description\", proc: ->(x) { x[\"headline\"] } },\n { action: \"data\", target: \"remediation\", data: \"Investigate this Issue!\" }\n ]\n }\n end", "docstring": "###", "url": "https://github.com/aleshevdenis/128iid/blob/1709c456363b4f9aca306328bbad14555735d88b/tasks/connectors/digital_footprint/expanse_issues/lib/expanse_issues_mapper.rb#L120-L174", "sha": "1709c456363b4f9aca306328bbad14555735d88b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Edgescan.KennaApi.upload", "code": "def upload\n kdi_upload(@output_dir, \"batch-#{millis}.json\", @kenna_connector_id, @kenna_api_host, @kenna_api_key, @skip_autoclose, @max_retries, @kdi_version)\n end", "docstring": "# you're happy for whatever is there to get imported into Kenna you can call `kickoff`", "url": "https://github.com/aleshevdenis/128iid/blob/1709c456363b4f9aca306328bbad14555735d88b/tasks/connectors/edgescan/lib/kenna_api.rb#L56-L58", "sha": "1709c456363b4f9aca306328bbad14555735d88b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Edgescan.KennaApi.add_assets_from_specifiers", "code": "def add_assets_from_specifiers(edgescan_location_specifiers, edgescan_vulnerabilities)\n # Convert location specifiers into kenna assets, remove any lists within lists, or duplicate assets\n kenna_assets = edgescan_location_specifiers.map(&:to_kenna_asset).flatten.uniq\n # Add any kenna assets, from vulnerabilities, that are not already present\n # This will only happen if a vulnerability does not have a corresponding host or location specifier\n 
 kenna_assets.concat(edgescan_vulnerabilities.map(&:to_kenna_asset).uniq - kenna_assets)\n kenna_assets.each do |asset|\n add_asset(asset)\n end\n end", "docstring": "# Converts Edgescan location specifiers and vulnerabilities into Kenna assets and adds them to memory", "url": "https://github.com/aleshevdenis/128iid/blob/1709c456363b4f9aca306328bbad14555735d88b/tasks/connectors/edgescan/lib/kenna_api.rb#L68-L77", "sha": "1709c456363b4f9aca306328bbad14555735d88b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Edgescan.KennaApi.add_asset", "code": "def add_asset(kenna_asset)\n return if (@assets || []).map { |asset| asset[\"external_id\"] }.include?(kenna_asset[\"external_id\"])\n\n create_kdi_asset(kenna_asset, false)\n end", "docstring": "# Adds Kenna asset into memory (if one with the same `external_id` doesn't exist already)", "url": "https://github.com/aleshevdenis/128iid/blob/1709c456363b4f9aca306328bbad14555735d88b/tasks/connectors/edgescan/lib/kenna_api.rb#L88-L92", "sha": "1709c456363b4f9aca306328bbad14555735d88b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Edgescan.KennaApi.add_finding", "code": "def add_finding(external_id, kenna_finding)\n create_kdi_asset_finding({ \"external_id\" => external_id }, kenna_finding, \"external_id\")\n end", "docstring": "# Adds Kenna finding into memory", "url": "https://github.com/aleshevdenis/128iid/blob/1709c456363b4f9aca306328bbad14555735d88b/tasks/connectors/edgescan/lib/kenna_api.rb#L100-L102", "sha": "1709c456363b4f9aca306328bbad14555735d88b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NetsparkerTask.map_state_to_triage_state", "code": "def map_state_to_triage_state(state_string)\n case state_string\n when \"Present\", /Revived|Scanning/\n \"new\"\n when /False Positive/\n \"false_positive\"\n when /Accepted Risk/\n \"risk_accepted\"\n when /Fixed/\n \"resolved\"\n when /Ignored/\n \"not_a_security_issue\"\n end\n end", "docstring": "# Possible Netsparker values are: Present, Accepted Risk, False Positive, Fixed (Unconfirmed), Fixed (Confirmed), Fixed (Can't Retest), Ignored, Revived, Scanning", "url": "https://github.com/aleshevdenis/128iid/blob/1709c456363b4f9aca306328bbad14555735d88b/tasks/connectors/netsparker/netsparker_task.rb#L202-L215", "sha": "1709c456363b4f9aca306328bbad14555735d88b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SL.SearchLink.get_plugin_configs", "code": "def get_plugin_configs(default_config)\n SL::Searches.plugins[:search].each_value do |plugin|\n next unless plugin.key?(:config) && !plugin[:config].nil? 
&& !plugin[:config].empty?\n\n plugin[:config].each do |cfg|\n new_config = get_plugin_config(cfg)\n\n default_config += new_config\n end\n end\n default_config\n end", "docstring": "#", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/config.rb#L241-L252", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SL.SearchLink.restore_prev_config", "code": "def restore_prev_config\n @prev_config&.each do |k, v|\n SL.config[k] = v\n $stderr.print \"\\r\\033[0KReset config: #{k} = #{SL.config[k]}\\n\" unless SILENT\n end\n @prev_config = {}\n end", "docstring": "# Reset configuration", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/config.rb#L291-L297", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "footer", "code": "def footer\n @footer ||= []\n end", "docstring": "# Stores the footer with reference links and footnotes", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/output.rb#L34-L36", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "originput", "code": "def originput\n @originput ||= \"\"\n end", "docstring": "# Stores the original input", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/output.rb#L54-L56", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "add_output", "code": "def add_output(str)\n print str if SL.printout && !SL.clipboard\n SL.output << str\n end", "docstring": "# Adds the given string to the output.", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/output.rb#L122-L125", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "add_report", "code": "def add_report(str)\n return unless SL.config[\"report\"]\n\n unless SL.line_num.nil?\n position = \"#{SL.line_num}:\"\n position += SL.match_column.nil? ? \"0:\" : \"#{SL.match_column}:\"\n position += SL.match_length.nil? ? 
\"0\" : SL.match_length.to_s\n end\n SL.report.push(\"(#{position}): #{str}\")\n warn \"(#{position}): #{str}\" unless SILENT\n end", "docstring": "# Adds the given string to the report.", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/output.rb#L174-L184", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "print_report", "code": "def print_report\n return if (SL.config[\"inline\"] && SL.originput.split(/\\n/).length == 1) || SL.clipboard\n\n return if SL.report.empty?\n\n out = \"\\n\\n\"\n add_output out\n end", "docstring": "# Prints the report.", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/output.rb#L209-L216", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SL.SemVer.initialize", "code": "def initialize(version_string)\n raise VersionError, \"Invalid semantic version number: #{version_string}\" unless version_string.valid_version?\n\n @maj, @min, @patch = version_string.split(/\\./)\n @pre = nil\n if @patch =~ /(-?[^0-9]+\\d*)$/\n @pre = Regexp.last_match(1).sub(/^-/, \"\")\n @patch = @patch.sub(/(-?[^0-9]+\\d*)$/, \"\")\n end\n\n @maj = @maj.to_i\n @min = @min.to_i\n @patch = @patch.to_i\n end", "docstring": "##", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/semver.rb#L14-L27", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SL.::String.clean", "code": "def clean\n gsub(/\\n+/, \" \")\n .gsub(/\"/, \""\")\n .gsub(/\\|/, \"-\")\n .gsub(/([&?]utm_[scm].+=[^&\\s!,.)\\]]++?)+(&.*)/, '\\2')\n .sub(/\\?&/, \"\").strip\n end", "docstring": "##", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/string.rb#L142-L148", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SL.::String.remove_protocol", "code": "def remove_protocol\n sub(%r{^(https?|s?ftp|file)://}, \"\")\n end", "docstring": "##", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/string.rb#L165-L167", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SL.::String.remove_seo", "code": "def remove_seo(url)\n title = dup\n url = URI.parse(url)\n host = url.hostname\n unless host\n return self unless SL.config[\"debug\"]\n\n SL.add_error(\"Invalid URL\", \"Could not remove SEO for #{url}\")\n return self\n end\n\n path = url.path\n root_page = path =~ %r{^/?$} ? true : false\n\n title.gsub!(/\\s*(–|—)\\s*/, \" - \")\n title.gsub!(/&[lr]dquo;/, '\"')\n title.gsub!(/&[lr]dquo;/, \"'\")\n title.gsub!(/–/, \" — \")\n title = CGI.unescapeHTML(title)\n title.gsub!(/ +/, \" \")\n\n seo_title_separators = %w[| » « — – - · :]\n\n begin\n re_parts = []\n\n host_parts = host.sub(/(?:www\\.)?(.*?)\\.[^.]+$/, '\\1').split(/\\./).delete_if { |p| p.length < 3 }\n h_re = !host_parts.empty? ? 
host_parts.map { |seg| seg.downcase.split(//).join(\".?\") }.join(\"|\") : \"\"\n re_parts.push(h_re) unless h_re.empty?\n\n # p_re = path.path_elements.map{|seg| seg.downcase.split(//).join('.?') }.join('|')\n # re_parts.push(p_re) if p_re.length > 0\n\n site_re = \"(#{re_parts.join('|')})\"\n\n dead_switch = 0\n\n while title.downcase.gsub(/[^a-z]/i, \"\") =~ /#{site_re}/i\n break if dead_switch > 5\n\n seo_title_separators.each_with_index do |sep, i|\n parts = title.split(/ *#{Regexp.escape(sep)} +/)\n\n next if parts.length == 1\n\n remaining_separators = seo_title_separators[i..].map { |s| Regexp.escape(s) }.join(\"\")\n seps = Regexp.new(\"^[^#{remaining_separators}]+$\")\n\n longest = parts.longest_element.strip\n\n unless parts.empty?\n parts.delete_if do |pt|\n compressed = pt.strip.downcase.gsub(/[^a-z]/i, \"\")\n compressed =~ /#{site_re}/ && pt =~ seps ? !root_page : false\n end\n end\n\n title = if parts.empty?\n longest\n elsif parts.length < 2\n parts.join(sep)\n elsif parts.length > 2\n parts.longest_element.strip\n else\n parts.join(sep)\n end\n end\n dead_switch += 1\n end\n rescue StandardError => e\n return self unless SL.config[\"debug\"]\n\n SL.add_error(\"Error SEO processing title for #{url}\", e)\n return self\n end\n\n seps = Regexp.new(\" *[#{seo_title_separators.map { |s| Regexp.escape(s) }.join('')}] +\")\n if title =~ seps\n seo_parts = title.split(seps)\n title = seo_parts.longest_element.strip if seo_parts.length.positive?\n end\n\n title && title.length > 5 ? title.gsub(/\\s+/, \" \") : CGI.unescapeHTML(self)\n end", "docstring": "##", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/string.rb#L257-L340", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SL.::String.truncate!", "code": "def truncate!(max)\n replace truncate(max)\n end", "docstring": "##", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/string.rb#L349-L351", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "search_with_timeout", "code": "def search_with_timeout(search, timeout)\n url = nil\n title = nil\n link_text = nil\n\n begin\n Timeout.timeout(timeout) do\n url, title, link_text = search.call\n end\n rescue Timeout::Error\n SL.add_error(\"Timeout\", \"Search timed out\")\n url, title, link_text = false\n end\n\n [url, title, link_text]\n end", "docstring": "##", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/util.rb#L58-L73", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "cache_file_for", "code": "def cache_file_for(filename)\n cache_folder = File.expand_path(\"~/.config/searchlink/cache\")\n FileUtils.mkdir_p(cache_folder) unless File.directory?(cache_folder)\n File.join(cache_folder, filename.sub(/(\\.cache)?$/, \".cache\"))\n end", "docstring": "##", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/util.rb#L83-L87", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "search_paths", "code": "def search_paths(path = ENV[\"PATH\"])\n paths = if path && !path.empty?\n path.split(::File::PATH_SEPARATOR)\n else\n 
%w[/usr/local/bin /usr/ucb /usr/bin /bin /opt/homebrew/bin]\n end\n paths.select(&Dir.method(:exist?))\n end", "docstring": "# Find default system paths", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/which.rb#L87-L94", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "file_with_path?", "code": "def file_with_path?(cmd)\n ::File.expand_path(cmd) == cmd\n end", "docstring": "# Check if executable file is part of absolute/relative path", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/which.rb#L165-L167", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Curl.Html.extract_tag", "code": "def extract_tag(tag, attribute = nil, source: false, content: false)\n res = extract_tag_contents(tag, source: true)\n\n return res if source\n\n res.map! do |tag_source|\n m = tag_source.to_enum(:scan, /(\S+)=(['\"])(.*?)\2/).map { Regexp.last_match }\n attrs = m.each_with_object({}) { |at, a| a[at[1]] = at[3] }\n tags = tag_source.to_enum(:scan, %r{(?mix)<(?<tag>(?!br)[a-z0-9]+)(?<attrs>\s[^>]+)?\n (?:\s*/>|>(?<content>.*?)</\k<tag>>)}).map { Regexp.last_match }\n tags.map do |tag|\n if tag[\"attrs\"].nil?\n attrs = nil\n else\n attrs = tag[\"attrs\"].strip.to_enum(:scan, /(?ix)\n (?<key>[@a-z0-9-]+)(?:=(?<quot>[\"'])\n (?<value>[^\"']+)\k<quot>|[ >])?/i).map { Regexp.last_match }\n attrs.map! { |a| { key: a[\"key\"], value: a[\"key\"] =~ /^(class|rel)$/ ? a[\"value\"].split(/ /) : a[\"value\"] } }\n end\n {\n tag: tag[\"tag\"],\n source: tag.to_s,\n attrs: attrs,\n content: tag[\"content\"],\n tags: content_tags(tag[\"content\"])\n }\n end\n end", "docstring": "##", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/curl/html.rb#L243-L266", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "applemusic", "code": "def applemusic(terms, media = \"music\", entity = \"\")\n url = \"https://itunes.apple.com/search?term=#{terms.url_encode}&country=#{SL.config['country_code']}&media=#{media}&entity=#{entity}\"\n page = Curl::Json.new(url, compressed: true, symbolize_names: true)\n json = page.json\n return false unless json[:resultCount]&.positive?\n\n output = process_result(json[:results][0])\n\n return false if output.empty?\n\n output\n end", "docstring": "# Search apple music", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/searches/applemusic.rb#L76-L87", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "search_hook", "code": "def search_hook(search)\n types = %w[name path address]\n query = search.strip.split(\" \").map { |s| types.map { |t| %(#{t} contains \"#{s}\") }.join(\" or \") }\n query = query.map { |q| \"(#{q})\" }.join(\" and \")\n path_matches = run_query(query)\n\n top_match = path_matches.uniq.first\n return false unless top_match\n\n [top_match[:url], top_match[:name]]\n end", "docstring": "# Search bookmark paths and addresses. 
Return array of bookmark hashes.", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/searches/hook.rb#L64-L74", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "search", "code": "def search(search_type, search_terms, link_text)\n # You can branch to multiple searches by testing the search_type\n case search_type\n when /e$/\n url, title = SL.ddg(\"site:genius.com #{search_terms}\", link_text)\n if url\n title = get_lyrics(url)\n # To return an embed, set url (first parameter in the return\n # array) to 'embed', and put the embed contents in the second\n # parameter.\n title ? [\"embed\", title, link_text] : false\n else\n # Use `SL#add_error(title, text)` to add errors to the HTML\n # report. The report will only be shown if errors have been added.\n SL.add_error(\"No lyrics found\", \"Song lyrics for #{search_terms} not found\")\n false\n end\n when /js$/\n url, title = SL.ddg(\"site:genius.com #{search_terms}\", link_text)\n if url\n title = js_embed(url)\n title ? [\"embed\", title, link_text] : false\n else\n SL.add_error(\"No lyrics found\", \"Song lyrics for #{search_terms} not found\")\n false\n end\n else\n # You can perform a DuckDuckGo search using SL#ddg, passing the\n # search terms and link_text. It will return url, title, and\n # link_text. SL#ddg will add its own errors, and if it returns false\n # that will automatically be tested for, no additional error\n # handling is required.\n url, title, link_text = SL.ddg(\"site:genius.com #{search_terms}\", link_text)\n # Always return an array containing the resulting URL, the title,\n # and the link_text variable that was passed in, even if it's\n # unmodified.\n [url, title, link_text]\n end\n end", "docstring": "# Every plugin must contain a #search method that takes 3 arguments:", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/searches/lyrics.rb#L53-L91", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "search", "code": "def search(_, search_terms, link_text)\n unless SL.config[\"pinboard_api_key\"]\n SL.add_error(\"Missing Pinboard API token\",\n \"Find your api key at https://pinboard.in/settings/password and add it\n to your configuration (pinboard_api_key: YOURKEY)\")\n return false\n end\n\n exact_match = false\n match_phrases = []\n\n # If search terms start with ''term, only search for exact string matches\n case search_terms\n when /^ *'/\n exact_match = true\n search_terms.gsub!(/(^ *'+|'+ *$)/, \"\")\n when /%22(.*?)%22/\n match_phrases = search_terms.scan(/%22(\\S.*?\\S)%22/)\n search_terms.gsub!(/%22(\\S.*?\\S)%22/, \"\")\n end\n\n cache = load_pinboard_cache\n # cache = pinboard_bookmarks\n bookmarks = cache[\"bookmarks\"]\n\n if exact_match\n bookmarks.each do |bm|\n text = [bm[\"description\"], bm[\"extended\"], bm[\"tags\"]].join(\" \")\n\n return [bm[\"href\"], bm[\"description\"], link_text] if text.matches_exact(search_terms)\n end\n\n return false\n end\n\n unless match_phrases.empty?\n bookmarks.delete_if do |bm|\n matched = true\n full_text = [bm[\"description\"], bm[\"extended\"], bm[\"tags\"]].join(\" \")\n match_phrases.each do |phrase|\n matched = false unless full_text.matches_exact(phrase)\n end\n !matched\n end\n end\n\n matches = []\n bookmarks.each do |bm|\n title_tags = [bm[\"description\"], bm[\"tags\"]].join(\" 
\")\n full_text = [bm[\"description\"], bm[\"extended\"], bm[\"tags\"]].join(\" \")\n\n score = if title_tags.matches_exact(search_terms)\n 14.0\n elsif full_text.matches_exact(search_terms)\n 13.0\n elsif full_text.matches_any(search_terms)\n full_text.matches_score(search_terms)\n else\n 0\n end\n\n return [bm[\"href\"], bm[\"description\"], link_text] if score == 14\n\n next unless score.positive?\n\n matches.push({\n score: score,\n href: bm[\"href\"],\n title: bm[\"description\"],\n date: bm[\"time\"]\n })\n end\n\n return false if matches.empty?\n\n top = matches.max_by { |bm| [bm[:score], bm[:date]] }\n\n return false unless top\n\n [top[:href], top[:title], link_text]\n end", "docstring": "# Search pinboard bookmarks", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/searches/pinboard.rb#L107-L187", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "search_edge_history", "code": "def search_edge_history(term)\n base = File.expand_path(\"~/Library/Application Support/Microsoft Edge/\")\n profiles = Dir.glob(\"**/History\", base: base)\n profiles.delete_if { |p| p =~ /^Snapshots/ }\n profiles.map! { |f| File.join(base, f) }\n\n res = false\n\n profiles.each do |bookmarks|\n next unless File.exist?(bookmarks)\n\n profile = bookmarks.match(%r{Edge/([^/]+)/})[1]\n\n SL.notify(\"Searching Chrome History for profile #{profile}\", term)\n res = search_chromium_history(bookmarks, term)\n\n break if res\n end\n\n res\n end", "docstring": "## Search Edge history", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/searches/helpers/chromium.rb#L61-L81", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "search_edge_bookmarks", "code": "def search_edge_bookmarks(term)\n base = File.expand_path(\"~/Library/Application Support/Microsoft Edge\")\n profiles = Dir.glob(\"**/Bookmarks\", base: base)\n profiles.delete_if { |p| p =~ /^Snapshots/ }\n profiles.map! { |f| File.join(base, f) }\n\n res = false\n\n profiles.each do |bookmarks|\n next unless File.exist?(bookmarks)\n\n profile = bookmarks.match(%r{Edge/([^/]+)/})[1]\n\n SL.notify(\"Searching Edge Bookmarks for profile #{profile}\", term)\n res = search_chromium_bookmarks(bookmarks, term)\n break if res\n end\n\n res\n end", "docstring": "##", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/searches/helpers/chromium.rb#L224-L243", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "search_chrome_bookmarks", "code": "def search_chrome_bookmarks(term)\n base = File.expand_path(\"~/Library/Application Support/Google/Chrome/\")\n profiles = Dir.glob(\"**/Bookmarks\", base: base)\n profiles.delete_if { |p| p =~ /^Snapshots/ }\n profiles.map! 
{ |f| File.join(base, f) }\n\n res = false\n\n profiles.each do |bookmarks|\n next unless File.exist?(bookmarks)\n\n profile = bookmarks.match(%r{Chrome/([^/]+)/})[1]\n\n SL.notify(\"Searching Chrome Bookmarks for profile #{profile}\", term)\n res = search_chromium_bookmarks(bookmarks, term)\n break if res\n end\n\n res\n end", "docstring": "##", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/searches/helpers/chromium.rb#L252-L271", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "search_arc_json", "code": "def search_arc_json(bookmarks_file, term)\n arc_bookmarks = JSON.parse(IO.read(bookmarks_file))\n\n exact_match = false\n match_phrases = []\n\n # If search terms start with ''term, only search for exact string matches\n if term =~ /^ *'/\n exact_match = true\n term.gsub!(/(^ *'+|'+ *$)/, \"\")\n elsif term =~ /%22(.*?)%22/\n match_phrases = term.scan(/%22(\\S.*?\\S)%22/)\n term.gsub!(/%22(\\S.*?\\S)%22/, \"\")\n end\n\n if arc_bookmarks\n bookmarks = []\n arc_bookmarks[\"sidebarSyncState\"][\"items\"].each do |mark|\n next if mark.is_a?(String)\n\n next unless mark[\"value\"][\"childrenIds\"].empty?\n\n next unless mark[\"value\"][\"data\"][\"tab\"]\n\n url = {\n url: mark[\"value\"][\"data\"][\"tab\"][\"savedURL\"],\n saved_title: mark[\"value\"][\"data\"][\"tab\"][\"savedTitle\"],\n title: mark[\"value\"][\"title\"],\n created: mark[\"value\"][\"createdAt\"].to_datetime,\n active: mark[\"value\"][\"data\"][\"tab\"][\"timeLastActiveAt\"]&.to_datetime\n }\n\n score = score_mark(url, term)\n\n if score > 7\n url[:score] = score\n bookmarks << url\n end\n end\n\n unless bookmarks.empty?\n if exact_match\n bookmarks.delete_if do |bm|\n !(bm[:url].matches_exact(term) ||\n bm[:title].matches_exact(term) ||\n bm[:saved_title].matches_exact(term))\n end\n end\n\n if match_phrases\n match_phrases.map! { |phrase| phrase[0] }\n bookmarks.delete_if do |bm|\n matched = true\n match_phrases.each do |phrase|\n matched = false unless bm[:url].matches_exact(phrase) ||\n bm[:title].matches_exact(phrase) ||\n bm[:saved_title].matches_exact(phrase)\n end\n !matched\n end\n end\n\n return false if bookmarks.empty?\n\n lastest_bookmark = bookmarks.min_by { |u| u[:created] }\n\n return [lastest_bookmark[:url], lastest_bookmark[:title], lastest_bookmark[:date]]\n end\n end\n\n false\n end", "docstring": "##", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/searches/helpers/chromium.rb#L281-L352", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "search_chromium_bookmarks", "code": "def search_chromium_bookmarks(bookmarks_file, term)\n chrome_bookmarks = JSON.parse(IO.read(bookmarks_file))\n\n exact_match = false\n match_phrases = []\n\n # If search terms start with ''term, only search for exact string matches\n if term =~ /^ *'/\n exact_match = true\n term.gsub!(/(^ *'+|'+ *$)/, \"\")\n elsif term =~ /%22(.*?)%22/\n match_phrases = term.scan(/%22(\\S.*?\\S)%22/)\n term.gsub!(/%22(\\S.*?\\S)%22/, \"\")\n end\n\n if chrome_bookmarks\n roots = chrome_bookmarks[\"roots\"]\n\n urls = extract_chrome_bookmarks(roots, [], term)\n\n unless urls.empty?\n urls.delete_if { |bm| !(bm[:url].matches_exact(term) || bm[:title].matches_exact(term)) } if exact_match\n\n if match_phrases\n match_phrases.map! 
{ |phrase| phrase[0] }\n urls.delete_if do |bm|\n matched = true\n match_phrases.each do |phrase|\n matched = false unless bm[:url].matches_exact(phrase) || bm[:title].matches_exact(phrase)\n end\n !matched\n end\n end\n\n return false if urls.empty?\n\n latest_bookmark = urls.max_by { |u| u[:score] }\n\n return [latest_bookmark[:url], latest_bookmark[:title], latest_bookmark[:date]]\n end\n end\n\n false\n end", "docstring": "##", "url": "https://github.com/ttscoff/searchlink/blob/70a0d794424e2312833310fdf8aeb78d18d13183/lib/searchlink/searches/helpers/chromium.rb#L364-L407", "sha": "70a0d794424e2312833310fdf8aeb78d18d13183"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Hosting::HetznerApis.reset", "code": "def reset(server_id, dist: \"Ubuntu 22.04.2 LTS base\")\n create_connection.post(path: \"/reset/#{server_id}\", body: \"type=hw\", expects: 200)\n nil\n end", "docstring": "# plugging it in again. Reset should only be used when reboot does not work.", "url": "https://github.com/ubicloud/ubicloud/blob/05593ec0c7f3cbae7ef9214933d69a50bd29e947/lib/hosting/hetzner_apis.rb#L31-L34", "sha": "05593ec0c7f3cbae7ef9214933d69a50bd29e947"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Strand.load", "code": "def load(snap = nil)\n Object.const_get(\"::Prog::\" + prog).new(self, snap)\n end", "docstring": "# :nocov:", "url": "https://github.com/ubicloud/ubicloud/blob/05593ec0c7f3cbae7ef9214933d69a50bd29e947/model/strand.rb#L76-L78", "sha": "05593ec0c7f3cbae7ef9214933d69a50bd29e947"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "VmHost.download_cloud_hypervisor", "code": "def download_cloud_hypervisor(version_x64: nil, version_arm64: nil, sha256_ch_bin_x64: nil, sha256_ch_bin_arm64: nil, sha256_ch_remote_x64: nil, sha256_ch_remote_arm64: nil)\n version, sha256_ch_bin, sha256_ch_remote = if arch == \"x64\"\n [version_x64, sha256_ch_bin_x64, sha256_ch_remote_x64]\n elsif arch == \"arm64\"\n [version_arm64, sha256_ch_bin_arm64, sha256_ch_remote_arm64]\n else\n fail \"BUG: unexpected architecture\"\n end\n fail ArgumentError, \"No version provided\" if version.nil?\n Strand.create_with_id(prog: \"DownloadCloudHypervisor\", label: \"start\", stack: [{subject_id: id, version: version, sha256_ch_bin: sha256_ch_bin, sha256_ch_remote: sha256_ch_remote}])\n end", "docstring": "# Introduced for downloading cloud hypervisor via REPL.", "url": "https://github.com/ubicloud/ubicloud/blob/05593ec0c7f3cbae7ef9214933d69a50bd29e947/model/vm_host.rb#L235-L245", "sha": "05593ec0c7f3cbae7ef9214933d69a50bd29e947"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "VmHost.hardware_reset", "code": "def hardware_reset\n Hosting::Apis.hardware_reset_server(self)\n end", "docstring": "# plugging it in again. Reset should only be used when reboot does not work.", "url": "https://github.com/ubicloud/ubicloud/blob/05593ec0c7f3cbae7ef9214933d69a50bd29e947/model/vm_host.rb#L282-L284", "sha": "05593ec0c7f3cbae7ef9214933d69a50bd29e947"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "VmHostSlice.allowed_cpus_cgroup", "code": "def allowed_cpus_cgroup\n @allowed_cpus_cgroup ||= cpus.map(&:cpu_number).sort.slice_when { |a, b| b != a + 1 }.map do |group|\n (group.size > 1) ? 
\"#{group.first}-#{group.last}\" : group.first.to_s\n end.join(\",\")\n end", "docstring": "# (comma-separated ranges of cpus)", "url": "https://github.com/ubicloud/ubicloud/blob/05593ec0c7f3cbae7ef9214933d69a50bd29e947/model/vm_host_slice.rb#L21-L25", "sha": "05593ec0c7f3cbae7ef9214933d69a50bd29e947"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Prog::Ai::InferenceEndpointReplicaNexus.ping_gateway", "code": "def ping_gateway\n api_key_ds = DB[:api_key]\n .where(owner_table: \"project\")\n .where(used_for: \"inference_endpoint\")\n .where(is_valid: true)\n .where(owner_id: Sequel[:project][:id])\n .exists\n\n eligible_projects_ds = Project.where(api_key_ds)\n eligible_projects_ds = eligible_projects_ds.where(id: inference_endpoint.project.id) unless inference_endpoint.is_public\n\n eligible_projects = eligible_projects_ds.all\n .select(&:active?)\n .map do\n {\n ubid: _1.ubid,\n api_keys: _1.api_keys.select { |k| k.used_for == \"inference_endpoint\" && k.is_valid }.map { |k| Digest::SHA2.hexdigest(k.key) },\n quota_rps: 50.0,\n quota_tps: 5000.0\n }\n end\n\n body = {\n replica_ubid: inference_endpoint_replica.ubid,\n public_endpoint: inference_endpoint.is_public,\n projects: eligible_projects\n }\n\n resp = vm.sshable.cmd(\"sudo curl -m 5 -s -H \\\"Content-Type: application/json\\\" -X POST --data-binary @- --unix-socket /ie/workdir/inference-gateway.clover.sock http://localhost/control\", stdin: body.to_json)\n project_usage = JSON.parse(resp)[\"projects\"]\n Clog.emit(\"Successfully pinged inference gateway.\") { {inference_endpoint: inference_endpoint.ubid, replica: inference_endpoint_replica.ubid, project_usage: project_usage} }\n update_billing_records(project_usage)\n end", "docstring": "# pushes latest config to inference gateway and collects billing information", "url": "https://github.com/ubicloud/ubicloud/blob/05593ec0c7f3cbae7ef9214933d69a50bd29e947/prog/ai/inference_endpoint_replica_nexus.rb#L164-L196", "sha": "05593ec0c7f3cbae7ef9214933d69a50bd29e947"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Prog::Vnet::UpdateFirewallRules.consolidate_rules", "code": "def consolidate_rules(rules)\n port_segments = create_port_segments(rules)\n consolidated_rules = []\n\n port_segments.each do |segment|\n # Find rules that overlap with the current segment\n overlapping_rules = rules.select do |r|\n r.port_range.begin <= segment[:end] && r.port_range.end - 1 >= segment[:begin]\n end\n\n # Merge cidrs for overlapping rules\n merged_cidrs = if rules.first.cidr.version == 4\n NetAddr.summ_IPv4Net(overlapping_rules.map(&:cidr))\n else\n NetAddr.summ_IPv6Net(overlapping_rules.map(&:cidr))\n end\n merged_cidrs.each do |cidr|\n consolidated_rules << FirewallRuleObj.new(cidr, {begin: segment[:begin], end: segment[:end] + 1})\n end\n end\n\n combined_rules = combine_continuous_ranges_for_same_subnet(consolidated_rules)\n combined_rules_self = combined_rules.map do |r|\n if r.port_range[:begin] != r.port_range[:end] - 1\n \"#{r.cidr} . #{r.port_range[:begin]}-#{r.port_range[:end] - 1}\"\n else\n \"#{r.cidr} . #{r.port_range[:begin]}\"\n end\n end.join(\",\")\n\n combined_rules_lb_dest = vm.load_balancer ? combined_rules.filter_map do |r|\n if r.port_range[:begin] <= vm.load_balancer.src_port && vm.load_balancer.src_port <= r.port_range[:end] - 1\n \"#{r.cidr} . 
#{vm.load_balancer.dst_port}\"\n end\n end.join(\",\") : []\n [combined_rules_self, combined_rules_lb_dest]\n end", "docstring": "# nftables side to avoid performance issues.", "url": "https://github.com/ubicloud/ubicloud/blob/05593ec0c7f3cbae7ef9214933d69a50bd29e947/prog/vnet/update_firewall_rules.rb#L220-L256", "sha": "05593ec0c7f3cbae7ef9214933d69a50bd29e947"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "r", "code": "def r(commandline, stdin: \"\", expect: [0])\n stdout, stderr, status = Open3.capture3(commandline, stdin_data: stdin)\n fail CommandFail.new(\"command failed: \" + commandline, stdout, stderr) unless expect.include?(status.exitstatus)\n stdout\nend", "docstring": "# rubocop:enable Lint/InheritException", "url": "https://github.com/ubicloud/ubicloud/blob/05593ec0c7f3cbae7ef9214933d69a50bd29e947/rhizome/common/lib/util.rb#L27-L31", "sha": "05593ec0c7f3cbae7ef9214933d69a50bd29e947"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Configuration.serpapi_api_key", "code": "def serpapi_api_key(**kwargs)\n key_lookup(:serpapi_api_key, kwargs)\n end", "docstring": "# @return [String] The SerpAPI API key either from arg or env.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars.rb#L46-L48", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Boxcar.validate_inputs", "code": "def validate_inputs(inputs:)\n missing_keys = input_keys - inputs.keys\n raise \"Missing some input keys: #{missing_keys}\" if missing_keys.any?\n\n inputs\n end", "docstring": "# @raise [RuntimeError] If the inputs are not the same.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/boxcar.rb#L33-L38", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Boxcar.apply", "code": "def apply(input_list:)\n raise NotImplementedError\n end", "docstring": "# @return [Array] The list of outputs.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/boxcar.rb#L57-L59", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Boxcar.save", "code": "def save(path:)\n File.write(path, YAML.dump(self))\n end", "docstring": "# save this boxcar to a file", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/boxcar.rb#L110-L112", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Conversation.check_lines", "code": "def check_lines(lines)\n raise ArgumentError, \"Lines must be an array\" unless lines.is_a?(Array)\n\n lines.each do |ln|\n raise ArgumentError, \"Conversation item must be a array\" unless ln.is_a?(Array)\n raise ArgumentError, \"Conversation item must have 2 items, role and text\" unless ln.size == 2\n raise ArgumentError, \"Conversation item must have a role #{ln} in (#{PEOPLE})\" unless PEOPLE.include? 
ln[0]\n end\n end", "docstring": "# check the lines", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/conversation.rb#L16-L24", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Conversation.to_a", "code": "def to_a\n lines\n end", "docstring": "# @return [Array] The result as a conversation array", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/conversation.rb#L27-L29", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Conversation.as_messages", "code": "def as_messages(inputs = nil)\n { messages: no_history.map { |ln| { role: ln.first, content: cformat(ln.last, inputs) } } }\n rescue ::KeyError => e\n first_line = e.message.to_s.split(\"\\n\").first\n Boxcars.error \"Missing prompt input key: #{first_line}\"\n raise KeyError, \"Prompt format error: #{first_line}\"\n end", "docstring": "# @return [Hash] The formatted prompt { messages: ...}", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/conversation.rb#L90-L96", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.ConversationPrompt.as_messages", "code": "def as_messages(inputs)\n conversation.as_messages(inputs)\n end", "docstring": "# @return [Hash] The formatted prompt.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/conversation_prompt.rb#L20-L22", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.ConversationPrompt.with_conversation", "code": "def with_conversation(conversation)\n return self unless conversation\n\n new_prompt = dup\n new_prompt.conversation.add_conversation(conversation)\n new_prompt\n end", "docstring": "# tack on the ongoing conversation if present to the prompt", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/conversation_prompt.rb#L32-L38", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Engine.initialize", "code": "def initialize(description: 'Engine', name: nil, prompts: [], batch_size: 20)\n @name = name || self.class.name\n @description = description\n @prompts = prompts\n @batch_size = batch_size\n end", "docstring": "# @param batch_size [Integer] The number of prompts to send to the Engine at a time.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine.rb#L13-L18", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Train.return_stopped_response", "code": "def return_stopped_response(early_stopping_method, intermediate_steps, **kwargs)\n case early_stopping_method\n when \"force\"\n TrainFinish.new({ output: \"Agent stopped due to max iterations.\" }, \"\")\n when \"generate\"\n thoughts = \"\"\n intermediate_steps.each do |action, observation|\n thoughts += action.log\n thoughts += \"\\n#{observation_text(observation)}\\n#{engine_prefix}\"\n end\n thoughts += \"\\n\\nI now need to return a final answer based on the previous 
steps:\"\n new_inputs = { agent_scratchpad: thoughts, stop: _stop }\n full_inputs = kwargs.merge(new_inputs)\n full_output = predict(**full_inputs)\n parsed_output = extract_boxcar_and_input(full_output)\n if parsed_output.nil?\n TrainFinish.new({ output: full_output }, full_output)\n else\n boxcar, boxcar_input = parsed_output\n Boxcars.debug \"Got boxcar #{boxcar} and input #{boxcar_input}\"\n if boxcar == finish_boxcar_name\n TrainFinish.new({ output: boxcar_input }, full_output)\n else\n TrainFinish.new({ output: full_output }, full_output)\n end\n end\n else\n raise \"early_stopping_method should be one of `force` or `generate`, got #{early_stopping_method}\"\n end\n end", "docstring": "# @return [Boxcars::Action] The action to take.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/train.rb#L156-L185", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Train.call", "code": "def call(inputs:)\n prepare_for_new_call\n intermediate_steps = []\n iterations = 0\n while should_continue?(iterations)\n output = plan(intermediate_steps, **inputs)\n return pre_return(output, intermediate_steps) if output.is_a?(TrainFinish)\n\n if (boxcar = name_to_boxcar_map[output.boxcar])\n begin\n observation = Observation.ok(get_boxcar_result(boxcar, output.boxcar_input))\n return_direct = boxcar.return_direct\n rescue Boxcars::ConfigurationError, Boxcars::SecurityError => e\n raise e\n rescue StandardError => e\n Boxcars.error \"Error in #{boxcar.name} train#call: #{e}\\nbt:#{caller[0..5].join(\"\\n \")}\", :red\n observation = Observation.err(\"Error - #{e}, correct and try again.\")\n end\n elsif output.boxcar == :error\n observation = output.log\n return_direct = false\n else\n observation = Observation.err(\"Error - #{output.boxcar} is not a valid action, try again.\")\n return_direct = false\n end\n Boxcars.debug \"Observation: #{observation}\", :green\n intermediate_steps.append([output, observation])\n if return_direct\n output = TrainFinish.new({ return_values[0] => observation }, \"\")\n return pre_return(output, intermediate_steps)\n end\n iterations += 1\n end\n output = return_stopped_response(early_stopping_method, intermediate_steps, **inputs)\n pre_return(output, intermediate_steps)\n end", "docstring": "# @return [Hash] The output.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/train.rb#L202-L237", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.VectorSearch.call", "code": "def call(query:, count: 1)\n validate_query(query)\n query_vector = convert_query_to_vector(query)\n @vector_search_instance.call(query_vector: query_vector, count: count)\n end", "docstring": "# ]", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/vector_search.rb#L42-L46", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.EngineBoxcar.initialize", "code": "def initialize(prompt:, engine: nil, **kwargs)\n @prompt = prompt\n @engine = engine || Boxcars.engine.new\n @top_k = kwargs.delete(:top_k) || 5\n @stop = kwargs.delete(:stop) || [\"Answer:\"]\n super(**kwargs)\n end", "docstring": "# @param kwargs [Hash] Additional arguments including: name, description, top_k, return_direct, 
and stop", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/boxcar/engine_boxcar.rb#L13-L19", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.EngineBoxcar.input_key", "code": "def input_key\n input_keys.first\n end", "docstring": "# the first input key for the prompt", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/boxcar/engine_boxcar.rb#L27-L29", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.JSONEngineBoxcar.extract_answer", "code": "def extract_answer(data)\n reply = data\n Result.new(status: :ok, answer: reply, explanation: reply)\n end", "docstring": "# @return [Result] The result.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/boxcar/json_engine_boxcar.rb#L66-L69", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Anthropic.get_num_tokens", "code": "def get_num_tokens(text:)\n text.split.length # TODO: hook up to token counting gem\n end", "docstring": "# calculate the number of tokens used", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/anthropic.rb#L159-L161", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Anthropic.max_tokens_for_prompt", "code": "def max_tokens_for_prompt(prompt_text)\n num_tokens = get_num_tokens(prompt_text)\n\n # get max context size for model by name\n max_size = modelname_to_contextsize(model_name)\n max_size - num_tokens\n end", "docstring": "# @return [Integer] the number of tokens possible to generate.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/anthropic.rb#L172-L178", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Cohere.initialize", "code": "def initialize(name: DEFAULT_NAME, description: DEFAULT_DESCRIPTION, prompts: [], **kwargs)\n @llm_params = DEFAULT_PARAMS.merge(kwargs)\n @prompts = prompts\n @batch_size = 20\n super(description: description, name: name)\n end", "docstring": "# @param prompts [Array] The prompts to use when asking the engine. Defaults to [].", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/cohere.rb#L28-L33", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.GeminiAi.initialize", "code": "def initialize(name: DEFAULT_NAME, description: DEFAULT_DESCRIPTION, prompts: [], batch_size: 20, **kwargs)\n @llm_parmas = DEFAULT_PARAMS.merge(kwargs)\n @prompts = prompts\n @batch_size = batch_size\n super(description: description, name: name)\n end", "docstring": "# @param batch_size [Integer] The number of prompts to send to the engine at once. 
Defaults to 20.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/gemini_ai.rb#L27-L32", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.GeminiAi.run", "code": "def run(question, **kwargs)\n prompt = Prompt.new(template: question)\n response = client(prompt: prompt, **kwargs)\n raise Error, \"GeminiAI: No response from API\" unless response\n\n check_response(response)\n response[\"choices\"].map { |c| c.dig(\"message\", \"content\") || c[\"text\"] }.join(\"\\n\").strip\n end", "docstring": "# @param kwargs [Hash] Additional parameters to pass to the engine if wanted.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/gemini_ai.rb#L69-L76", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.GeminiAi.default_params", "code": "def default_params\n llm_params\n end", "docstring": "# Get the default parameters for the engine.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/gemini_ai.rb#L79-L81", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Gpt4allEng.initialize", "code": "def initialize(name: DEFAULT_NAME, description: DEFAULT_DESCRIPTION, prompts: [], batch_size: 2, **_kwargs)\n @prompts = prompts\n @batch_size = batch_size\n super(description: description, name: name)\n end", "docstring": "# @param batch_size [Integer] The number of prompts to send to the engine at once. Defaults to 2.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/gpt4all_eng.rb#L22-L26", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Gpt4allEng.client", "code": "def client(prompt:, inputs: {}, **_kwargs)\n gpt4all = Gpt4all::ConversationalAI.new\n gpt4all.prepare_resources(force_download: false)\n gpt4all.start_bot\n input_text = prompt.as_prompt(inputs: inputs)[:prompt]\n Boxcars.debug(\"Prompt after formatting:\\n#{input_text}\", :cyan) if Boxcars.configuration.log_prompts\n gpt4all.prompt(input_text)\n rescue StandardError => e\n Boxcars.error([\"Error from gpt4all engine: #{e}\", e.backtrace[-5..-1]].flatten.join(\"\\n \"))\n ensure\n gpt4all.stop_bot\n end", "docstring": "# @param kwargs [Hash] Additional parameters to pass to the engine if wanted.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/gpt4all_eng.rb#L33-L44", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Gpt4allEng.run", "code": "def run(question, **kwargs)\n prompt = Prompt.new(template: question)\n answer = client(prompt: prompt, **kwargs)\n Boxcars.debug(\"Answer: #{answer}\", :cyan)\n answer\n end", "docstring": "# @param kwargs [Hash] Additional parameters to pass to the engine if wanted.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/gpt4all_eng.rb#L49-L54", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"Boxcars.IntelligenceBase.default_model_params", "code": "def default_model_params\n {}\n end", "docstring": "# can be overridden by provider subclass", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/intelligence_base.rb#L24-L26", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.IntelligenceBase.validate_content", "code": "def validate_content(content)\n raise ArgumentError, \"Content must have type and text fields\" unless content[:type] && content[:text]\n\n content\n end", "docstring": "# Validate content structure", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/intelligence_base.rb#L53-L57", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Openai.initialize", "code": "def initialize(name: DEFAULT_NAME, description: DEFAULT_DESCRIPTION, prompts: [], batch_size: 20, **kwargs)\n @open_ai_params = DEFAULT_PARAMS.merge(kwargs)\n if @open_ai_params[:model] =~ /^o/ && @open_ai_params[:max_tokens].present?\n @open_ai_params[:max_completion_tokens] = @open_ai_params.delete(:max_tokens)\n @open_ai_params.delete(:temperature)\n end\n\n @prompts = prompts\n @batch_size = batch_size\n super(description: description, name: name)\n end", "docstring": "# @param batch_size [Integer] The number of prompts to send to the engine at once. Defaults to 20.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/openai.rb#L29-L39", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Openai.check_response", "code": "def check_response(response, must_haves: %w[choices])\n if response['error']\n code = response.dig('error', 'code')\n msg = response.dig('error', 'message') || 'unknown error'\n raise KeyError, \"OPENAI_ACCESS_TOKEN not valid\" if code == 'invalid_api_key'\n\n raise ValueError, \"OpenAI error: #{msg}\"\n end\n\n must_haves.each do |key|\n raise ValueError, \"Expecting key #{key} in response\" unless response.key?(key)\n end\n end", "docstring": "# @raise [ValueError] if the response is not valid.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/openai.rb#L109-L121", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.Perplexityai.initialize", "code": "def initialize(name: DEFAULT_PER_NAME, description: DEFAULT_PER_DESCRIPTION, prompts: [], batch_size: 20, **kwargs)\n @perplexity_params = DEFAULT_PER_PARAMS.merge(kwargs)\n @prompts = prompts\n @batch_size = batch_size\n super(description: description, name: name)\n end", "docstring": "# @param batch_size [Integer] The number of prompts to send to the engine at once. 
Defaults to 20.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/engine/perplexityai.rb#L27-L32", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.TrainAction.initialize", "code": "def initialize(boxcar:, log:, boxcar_input: nil)\n @boxcar_input = boxcar_input\n @boxcar = boxcar\n @log = log\n end", "docstring": "# @return [Boxcars::TrainAction] The train action.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/train/train_action.rb#L13-L17", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.ZeroShot.extract_boxcar_and_input", "code": "def extract_boxcar_and_input(text)\n get_action_and_input(engine_output: text)\n rescue StandardError => e\n [:error, e.message]\n end", "docstring": "# @return [Array] The boxcar and input.", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/train/zero_shot.rb#L30-L34", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Boxcars.VectorStore.InMemory.BuildFromFiles.initialize", "code": "def initialize(params)\n @split_chunk_size = params[:split_chunk_size] || 2000\n @training_data_path = File.absolute_path(params[:training_data_path])\n @embedding_tool = params[:embedding_tool] || :openai\n\n validate_params(embedding_tool, training_data_path)\n @memory_vectors = []\n end", "docstring": "# @return [Hash] vector_store: array of hashes with :content, :metadata, and :embedding keys", "url": "https://github.com/BoxcarsAI/boxcars/blob/9a294a13e96cbb628adb542807464e81a94d6535/lib/boxcars/vector_store/in_memory/build_from_files.rb#L15-L22", "sha": "9a294a13e96cbb628adb542807464e81a94d6535"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "set_distance_type!", "code": "def set_distance_type! 
distance_function\n Flann.send(:flann_set_distance_type, distance_function, get_distance_order)\n self\n end", "docstring": "# Set the distance function to use when computing distances between data points.", "url": "https://github.com/zhangganlin/GlobalSfMpy/blob/ac6a0564d84e1d6e9a4077195e384d379aa20492/thirdparty/TheiaSfM/libraries/flann/src/ruby/lib/flann.rb#L199-L202", "sha": "ac6a0564d84e1d6e9a4077195e384d379aa20492"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "read_dataset", "code": "def read_dataset filename\n Dir.chdir(\"spec\") do\n f = File.new(filename, 'r')\n n = NMatrix.new([65536, 3], dtype: :float32)\n i = 0\n while line = f.gets\n line.chomp!\n fields = line.split\n n[i,:*] = fields.map { |field| field.to_f }\n\n i += 1\n end\n\n n\n end\nend", "docstring": "# Helper function for reading a test dataset so we can test nearest neighbors", "url": "https://github.com/zhangganlin/GlobalSfMpy/blob/ac6a0564d84e1d6e9a4077195e384d379aa20492/thirdparty/TheiaSfM/libraries/flann/src/ruby/spec/spec_helper.rb#L35-L50", "sha": "ac6a0564d84e1d6e9a4077195e384d379aa20492"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "By.Server.initialize", "code": "def initialize(socket_path: default_socket_path, argv: default_argv, debug: default_debug,\n daemonize: default_daemonize, daemon_args: default_daemon_args,\n worker_class: default_worker_class)\n @socket_path = socket_path\n @argv = argv\n @debug = debug\n if @daemonize = !!daemonize\n @daemon_args = Array(daemon_args)\n end\n @worker_class = worker_class\n end", "docstring": "# worker_class: The class to use for worker process handling, Worker by default.", "url": "https://github.com/jeremyevans/by/blob/20a61981f559498b6a47ecfdb8e694880d42c9ec/lib/by/server.rb#L30-L40", "sha": "20a61981f559498b6a47ecfdb8e694880d42c9ec"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "By.Server.print_loaded_features", "code": "def print_loaded_features\n puts $LOADED_FEATURES\n end", "docstring": "# Print $LOADED_FEATURES to stdout.", "url": "https://github.com/jeremyevans/by/blob/20a61981f559498b6a47ecfdb8e694880d42c9ec/lib/by/server.rb#L207-L209", "sha": "20a61981f559498b6a47ecfdb8e694880d42c9ec"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "By.Worker.reopen_stdio", "code": "def reopen_stdio\n $stdin.reopen(@socket.recv_io(IO))\n $stdout.reopen(@socket.recv_io(IO))\n $stderr.reopen(@socket.recv_io(IO))\n end", "docstring": "# Replace stdin, stdout, stderr with the IO values provided by the client.", "url": "https://github.com/jeremyevans/by/blob/20a61981f559498b6a47ecfdb8e694880d42c9ec/lib/by/worker.rb#L36-L40", "sha": "20a61981f559498b6a47ecfdb8e694880d42c9ec"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UsersController.show", "code": "def show\n render json: @user\n end", "docstring": "# GET /users/1", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/test/integration/components/rubytestserver/testapi/app/controllers/users_controller.rb#L12-L14", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UsersController.update", "code": "def update\n if @user.update(user_params)\n render json: @user\n else\n render json: @user.errors, status: :unprocessable_entity\n end\n end", "docstring": "# PATCH/PUT /users/1", "url": 
"https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/test/integration/components/rubytestserver/testapi/app/controllers/users_controller.rb#L28-L34", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "random_str", "code": "def random_str(size)\n @db.get_first_value(\"select hex(randomblob(?))\", size)\nend", "docstring": "# sqlite database for fast random string generation", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/bench/bench.rb#L28-L30", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ActiveSupport.Cache.Litecache.read_entry", "code": "def read_entry(key, **options)\n deserialize_entry(@cache.get(key))\n end", "docstring": "# Read an entry from the cache.", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/active_support/cache/litecache.rb#L80-L82", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ActiveSupport.Cache.Litecache.delete_entry", "code": "def delete_entry(key, **options)\n @cache.delete(key)\n end", "docstring": "# Delete an entry from the cache.", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/active_support/cache/litecache.rb#L120-L122", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Litecable.subscribe", "code": "def subscribe(channel, subscriber, success_callback = nil)\n @subscribers.acquire do |subs|\n subs[channel] = {} unless subs[channel]\n subs[channel][subscriber] = true\n end\n success_callback&.call\n capture(:subscribe, channel)\n end", "docstring": "# subscribe to a channel, optionally providing a success callback proc", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/litestack/litecable.rb#L40-L47", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Litecache.increment", "code": "def increment(key, amount = 1, expires_in = nil)\n expires_in ||= @expires_in\n @conn.acquire { |cache| cache.stmts[:incrementer].execute!(key.to_s, amount, expires_in)[0][0] }\n end", "docstring": "# increment an integer value by amount, optionally add an expiry value (in seconds)", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/litestack/litecache.rb#L165-L168", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Litesupport.Liteconnection.with_connection", "code": "def with_connection\n @conn.acquire do |conn|\n @checked_out_conn = conn\n yield conn\n ensure\n @checked_out_conn = nil\n end\n end", "docstring": "# checked out connection if one exists", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/litestack/liteconnection.rb#L132-L139", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Litedb.transaction", "code": "def transaction(mode = :immediate)\n super(mode)\n end", "docstring": "# enforce immediate mode to avoid deadlocks for a small performance penalty", "url": 
"https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/litestack/litedb.rb#L43-L45", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Litejobqueue.delete", "code": "def delete(id)\n job = super(id)\n @logger.info(\"[litejob]:[DEL] job: #{job}\")\n job = Oj.load(job[0], symbol_keys: true) if job\n job\n end", "docstring": "# jobqueue.delete(id)", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/litestack/litejobqueue.rb#L120-L125", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Litequeue.clear", "code": "def clear(queue = nil)\n run_sql(\"DELETE FROM queue WHERE iif(?1 IS NOT NULL, name = ?1, TRUE)\", queue)\n end", "docstring": "# deletes all the entries in all queues, or if a queue name is given, deletes all entries in that specific queue", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/litestack/litequeue.rb#L79-L81", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatabaseMethods.supports_create_table_if_not_exists?", "code": "def supports_create_table_if_not_exists?\n sqlite_version >= 30300\n end", "docstring": "# SQLite supports CREATE TABLE IF NOT EXISTS syntax since 3.3.0.", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L129-L131", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatabaseMethods.supports_deferrable_foreign_key_constraints?", "code": "def supports_deferrable_foreign_key_constraints?\n sqlite_version >= 30619\n end", "docstring": "# SQLite 3.6.19+ supports deferrable foreign key constraints.", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L134-L136", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatabaseMethods.views", "code": "def views(opts = OPTS)\n tables_and_views({type: \"view\"}, opts)\n end", "docstring": "# :server :: Set the server to use.", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L180-L182", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatabaseMethods._foreign_key_list_ds", "code": "def _foreign_key_list_ds(table)\n metadata_dataset.with_sql(\"PRAGMA foreign_key_list(?)\", input_identifier_meth.call(table))\n end", "docstring": "# Dataset used for parsing foreign key lists", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L187-L189", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatabaseMethods._parse_pragma_ds", "code": "def _parse_pragma_ds(table_name, opts)\n metadata_dataset.with_sql(\"PRAGMA table_#{\"x\" if sqlite_version > 33100}info(?)\", input_identifier_meth(opts[:dataset]).call(table_name))\n end", "docstring": "# Dataset used for 
parsing schema", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L192-L194", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatasetMethods.complex_expression_sql_append", "code": "def complex_expression_sql_append(sql, op, args)\n case op\n when :\"NOT LIKE\", :\"NOT ILIKE\"\n sql << \"NOT \"\n complex_expression_sql_append(sql, ((op == :\"NOT ILIKE\") ? :ILIKE : :LIKE), args)\n when :^\n complex_expression_arg_pairs_append(sql, args) { |a, b| Sequel.lit([\"((~(\", \" & \", \")) & (\", \" | \", \"))\"], a, b, a, b) }\n when :**\n unless (exp = args[1]).is_a?(Integer)\n raise(Sequel::Error, \"can only emulate exponentiation on SQLite if exponent is an integer, given #{exp.inspect}\")\n end\n case exp\n when 0\n sql << \"1\"\n else\n sql << \"(\"\n arg = args[0]\n if exp < 0\n invert = true\n exp = exp.abs\n sql << \"(1.0 / (\"\n end\n (exp - 1).times do\n literal_append(sql, arg)\n sql << \" * \"\n end\n literal_append(sql, arg)\n sql << \")\"\n if invert\n sql << \"))\"\n end\n end\n when :extract\n part = args[0]\n raise(Sequel::Error, \"unsupported extract argument: #{part.inspect}\") unless (format = EXTRACT_MAP[part])\n sql << \"CAST(strftime(\" << format << \", \"\n literal_append(sql, args[1])\n sql << \") AS \" << ((part == :second) ? \"NUMERIC\" : \"INTEGER\") << \")\"\n else\n super\n end\n end", "docstring": "# It doesn't support xor, power, or the extract function natively, so those have to be emulated.", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L600-L641", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatasetMethods.explain", "code": "def explain(opts = nil)\n # Load the PrettyTable class, needed for explain output\n Sequel.extension(:_pretty_table) unless defined?(Sequel::PrettyTable)\n\n ds = db.send(:metadata_dataset).clone(sql: \"EXPLAIN #{select_sql}\")\n rows = ds.all\n Sequel::PrettyTable.string(rows, ds.columns)\n end", "docstring": "# to be compatible with other adapters.", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L669-L676", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatasetMethods.having", "code": "def having(*cond)\n raise(InvalidOperation, \"Can only specify a HAVING clause on a grouped dataset\") if !@opts[:group] && db.sqlite_version < 33900\n super\n end", "docstring": "# HAVING requires GROUP BY on SQLite", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L679-L682", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatasetMethods.insert_select", "code": "def insert_select(*values)\n return unless supports_insert_select?\n # Handle case where query does not return a row\n server?(:default).with_sql_first(insert_select_sql(*values)) || false\n end", "docstring": "# returning instead of a separate query.", "url": 
"https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L686-L690", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatasetMethods.returning", "code": "def returning(*values)\n return super if values.empty?\n raise Error, \"RETURNING is not supported on #{db.database_type}\" unless supports_returning?(:insert)\n clone(returning: _returning_values(values).freeze)\n end", "docstring": "# Automatically add aliases to RETURNING values to work around SQLite bug.", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L781-L785", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatasetMethods.supports_cte?", "code": "def supports_cte?(type = :select)\n db.sqlite_version >= 30803\n end", "docstring": "# SQLite 3.8.3+ supports common table expressions.", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L788-L790", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatasetMethods.supports_deleting_joins?", "code": "def supports_deleting_joins?\n false\n end", "docstring": "# SQLite does not support deleting from a joined dataset", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L803-L805", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatasetMethods.supports_window_clause?", "code": "def supports_window_clause?\n db.sqlite_version >= 32800\n end", "docstring": "# SQLite 3.28+ supports the WINDOW clause.", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L845-L847", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatasetMethods.insert_on_conflict_sql", "code": "def insert_on_conflict_sql(sql)\n if (opts = @opts[:insert_on_conflict])\n sql << \" ON CONFLICT\"\n\n if (target = opts[:constraint])\n sql << \" ON CONSTRAINT \"\n identifier_append(sql, target)\n elsif (target = opts[:target])\n sql << \" \"\n identifier_append(sql, Array(target))\n if (conflict_where = opts[:conflict_where])\n sql << \" WHERE \"\n literal_append(sql, conflict_where)\n end\n end\n\n if (values = opts[:update])\n sql << \" DO UPDATE SET \"\n update_sql_values_hash(sql, values)\n if (update_where = opts[:update_where])\n sql << \" WHERE \"\n literal_append(sql, update_where)\n end\n else\n sql << \" DO NOTHING\"\n end\n end\n end", "docstring": "# Add ON CONFLICT clause if it should be used", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L934-L961", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatasetMethods.literal_blob_append", "code": "def literal_blob_append(sql, v)\n sql << \"X'\" << v.unpack1(\"H*\") << \"'\"\n end", "docstring": "# SQLite uses a preceding X for hex escaping 
strings", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L964-L966", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatasetMethods.multi_insert_sql_strategy", "code": "def multi_insert_sql_strategy\n (db.sqlite_version >= 30711) ? :values : :union\n end", "docstring": "# starting in 3.7.11. On older versions, fallback to using a UNION.", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L980-L982", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sequel.Litedb.DatasetMethods._truncate_sql", "code": "def _truncate_sql(table)\n \"DELETE FROM #{table}\"\n end", "docstring": "# SQLite treats a DELETE with no WHERE clause as a TRUNCATE", "url": "https://github.com/oldmoe/litestack/blob/e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf/lib/sequel/adapters/shared/litedb.rb#L1032-L1034", "sha": "e598e1b1f0d46f45df1e2c6213ff9b136b63d9bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Tailscale.set_hostname", "code": "def set_hostname(hostname)\n assert_open\n Error.check self, Libtailscale::TsnetSetHostname(@t, hostname)\n end", "docstring": "# Set the hostname to use for the tailscale node.", "url": "https://github.com/tailscale/libtailscale/blob/9d45e587f0837c9ea0ee55cba08dfa448dc8d480/ruby/lib/tailscale.rb#L196-L199", "sha": "9d45e587f0837c9ea0ee55cba08dfa448dc8d480"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Tailscale.set_auth_key", "code": "def set_auth_key(auth_key)\n assert_open\n Error.check self, Libtailscale::TsnetSetAuthKey(@t, auth_key)\n end", "docstring": "# Set the auth key to use for the tailscale node.", "url": "https://github.com/tailscale/libtailscale/blob/9d45e587f0837c9ea0ee55cba08dfa448dc8d480/ruby/lib/tailscale.rb#L202-L205", "sha": "9d45e587f0837c9ea0ee55cba08dfa448dc8d480"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Tailscale.set_ephemeral", "code": "def set_ephemeral(ephemeral)\n assert_open\n Error.check self, Libtailscale::TsnetSetEphemeral(@t, ephemeral ? 
1 : 0)\n end", "docstring": "# Set whether the node is ephemeral or not.", "url": "https://github.com/tailscale/libtailscale/blob/9d45e587f0837c9ea0ee55cba08dfa448dc8d480/ruby/lib/tailscale.rb#L214-L217", "sha": "9d45e587f0837c9ea0ee55cba08dfa448dc8d480"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Tailscale.loopback", "code": "def loopback\n assert_open\n addrbuf = FFI::MemoryPointer.new(:char, 1024)\n proxycredbuf = FFI::MemoryPointer.new(:char, 33)\n localcredbuf = FFI::MemoryPointer.new(:char, 33)\n Error.check self, Libtailscale::TsnetLoopback(@t, addrbuf, addrbuf.size, proxycredbuf, localcredbuf)\n [addrbuf.read_string, proxycredbuf.read_string, localcredbuf.read_string]\n end", "docstring": "# and credentials for using it as LocalAPI or a proxy.", "url": "https://github.com/tailscale/libtailscale/blob/9d45e587f0837c9ea0ee55cba08dfa448dc8d480/ruby/lib/tailscale.rb#L248-L255", "sha": "9d45e587f0837c9ea0ee55cba08dfa448dc8d480"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HomographicSpoofing::Detector::Rule::MixedDigits.read_digits", "code": "def read_digits\n File.read(\"#{__dir__}/data/digits.csv\")\n end", "docstring": "# Built with script/development/generate_digits_characters.rb", "url": "https://github.com/basecamp/homographic_spoofing/blob/eca57ac5f2238a377b9e5201dbffedeb3d8cb5c6/lib/homographic_spoofing/detector/rule/mixed_digits.rb#L27-L29", "sha": "eca57ac5f2238a377b9e5201dbffedeb3d8cb5c6"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HomographicSpoofing::Detector::Rule::Idn::ScriptSpecific.latin_spoof?", "code": "def latin_spoof?\n scripts != Set[LATN] && non_ascii_latin_letters.present?\n end", "docstring": "# Note that the non-ASCII Latin check should not be applied when the entire label is made of Latin.", "url": "https://github.com/basecamp/homographic_spoofing/blob/eca57ac5f2238a377b9e5201dbffedeb3d8cb5c6/lib/homographic_spoofing/detector/rule/idn/script_specific.rb#L10-L12", "sha": "eca57ac5f2238a377b9e5201dbffedeb3d8cb5c6"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Juno.Client.ReactClient.create", "code": "def create(key, value, ttl: nil)\n juno_request = Juno::Client::JunoRequest.new(key: key,\n value: value,\n version: 0,\n type: Juno::Client::JunoRequest::Type::CREATE,\n time_to_live_s: ttl,\n creation_time: Time.now.to_i)\n process_single(juno_request)\n end", "docstring": "# @see Juno::DefaultProperties::DEFAULT_LIFETIME_S", "url": "https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/Client/react_client.rb#L56-L64", "sha": "9750751a5335111b7a536326df5925e78cad245d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Juno.IO.MetadataComponent.set_time_to_live", "code": "def set_time_to_live(ttl)\n ttl = ttl.to_i\n raise ArgumentError, 'TTL should be > 0' unless ttl.positive?\n\n @time_to_live = ttl\n ttl = [ttl].pack(OffsetWidth.UINT32)\n add_field(MetadataField.new(0x01, 0x01, ttl))\n end", "docstring": "# @param ttl [Integer] - Record Time to live", "url": "https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/IO/MetadataComponent.rb#L102-L109", "sha": "9750751a5335111b7a536326df5925e78cad245d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Juno.IO.MetadataComponent.set_originator_request_id", "code": "def 
set_originator_request_id(input_uuid_byte_string = nil)\n @originator_request_id = if input_uuid_byte_string.nil?\n UUIDTools::UUID.random_create\n else\n UUIDTools::UUID.parse_raw(input_uuid_byte_string)\n end\n add_field(MetadataField.new(0x08, 0x03, @originator_request_id.raw))\n @originator_request_id\n end", "docstring": "# if not provided, creates a uuid itself", "url": "https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/IO/MetadataComponent.rb#L168-L176", "sha": "9750751a5335111b7a536326df5925e78cad245d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Juno.IO.OperationMessage.size", "code": "def size\n total_size = protocol_header.num_bytes\n total_size += payload_component.num_bytes unless payload_component.nil?\n total_size += metadata_component.num_bytes unless metadata_component.nil?\n total_size\n end", "docstring": "# Calculates size of message", "url": "https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/IO/OperationMessage.rb#L15-L20", "sha": "9750751a5335111b7a536326df5925e78cad245d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Juno.IO.PayloadComponent.custom_num_bytes", "code": "def custom_num_bytes\n size = component_size.num_bytes + tag_id.num_bytes + namespace_length.num_bytes + key_length.num_bytes + payload_length.num_bytes + namespace.num_bytes + payload_key.num_bytes\n size += payload_type.num_bytes + payload_data.num_bytes if payload_length.positive?\n size\n end", "docstring": "# to prevent stack overflow", "url": "https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/IO/PayloadComponent.rb#L103-L107", "sha": "9750751a5335111b7a536326df5925e78cad245d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Juno.Net.ClientHandler.on_connection_completed", "code": "def on_connection_completed\n # puts \"completed #{Time.now}\"\n end", "docstring": "# Method called when TCP connection established. If useSSL is true, it is called after a successful ssl handshake", "url": "https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/Net/client_handler.rb#L50-L52", "sha": "9750751a5335111b7a536326df5925e78cad245d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Juno.Net.IOProcessor.juno_connect", "code": "def juno_connect(recycle = false)\n return if !recycle && !@channel.nil? && @channel.is_connected?\n\n new_channel = EventMachine.connect(Juno.juno_config.host, Juno.juno_config.port, ClientHandler, self)\n new_channel.pending_connect_timeout = Juno.juno_config.connection_lifetime\n EventMachine::Timer.new(Juno.juno_config.connection_timeout.to_f / 1000) do\n if new_channel.is_connected?\n @LOGGER.info(@PROG_NAME) { \"connected to #{Juno.juno_config.host}:#{Juno.juno_config.port} \" }\n if recycle\n old_channel = @channel\n @channel = new_channel\n disconnect_channel(old_channel)\n else\n @channel = new_channel\n end\n initiate_bypass_ltm if use_ltm?\n set_recycle_timer\n else\n @recycle_timer&.cancel\n new_channel&.close_connection if !new_channel.nil? 
&& new_channel.is_connected?\n @LOGGER.info(@PROG_NAME) do\n \"Could not connect to #{Juno.juno_config.host}:#{Juno.juno_config.port}\\n Retrying in #{@reconnect_wait_time.to_f / 1000}ms \"\n end\n EventMachine::Timer.new(@reconnect_wait_time.to_f / 1000) do\n @reconnect_wait_time *= 2\n @reconnect_wait_time = MAX_WAIT_TIME if @reconnect_wait_time > MAX_WAIT_TIME\n @reconnect_wait_time *= (1 + 0.3 * rand)\n juno_connect(recycle)\n end\n end\n end\n end", "docstring": "# @param recycle [Boolean] - True if connection refresh request (optional, default: false)", "url": "https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/Net/io_processor.rb#L107-L138", "sha": "9750751a5335111b7a536326df5925e78cad245d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Juno.Net.PingMessage.initialize", "code": "def initialize(app_name = nil, opaque = 0)\n @PROG_NAME = self.class.name\n @LOGGER = Juno::Logger.instance\n app_name = JUNO_INTERNAL_APPNAME if app_name.to_s.empty?\n\n meta_data_component = Juno::IO::MetadataComponent.new\n meta_data_component.set_request_uuid\n meta_data_component.set_source_info(app_name: app_name, ip: IPAddr.new(Juno::Utils.local_ips[0]), port: 0)\n\n protocol_header = Juno::IO::ProtocolHeader.new\n protocol_header.opcode = Juno::IO::ProtocolHeader::OpCodes::Nop\n protocol_header.opaque = opaque\n\n @operation_message = Juno::IO::OperationMessage.new\n @operation_message.metadata_component = meta_data_component\n @operation_message.protocol_header = protocol_header\n end", "docstring": "# @param opaque [Integer] (optional, default: 0)", "url": "https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/client/Ruby/juno/lib/juno/Net/ping_message.rb#L15-L31", "sha": "9750751a5335111b7a536326df5925e78cad245d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sync_today_stars", "code": "def sync_today_stars(info, latest_stars)\n today = Time.now.strftime('%Y-%m-%d')\n if info.nil? 
|| !info.include?('_')\n [today, latest_stars, 0]\n else\n date, total_stars, change_stars = info.split('_')\n if date != today\n change_stars = 0\n end\n if latest_stars.to_i == 0\n [today, total_stars, change_stars]\n else\n change_stars = change_stars.to_i + (latest_stars.to_i - total_stars.to_i)\n [today, latest_stars, change_stars]\n end\n end\nend", "docstring": "# cumulate stars changes", "url": "https://github.com/superiorlu/AITreasureBox/blob/4f7e1a1b62066557c5ac95c32818c2082873b9e8/lib/update_readme.rb#L143-L159", "sha": "4f7e1a1b62066557c5ac95c32818c2082873b9e8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "list_encoding_names", "code": "def list_encoding_names\n SUPPORTED_ENCODINGS\n end", "docstring": "# Lists all the encodings that are supported", "url": "https://github.com/IAPark/tiktoken_ruby/blob/ab315180ce105f6e01ed278ff359dfb7f28c7196/lib/tiktoken_ruby.rb#L51-L53", "sha": "ab315180ce105f6e01ed278ff359dfb7f28c7196"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "list_model_names", "code": "def list_model_names\n MODEL_TO_ENCODING_NAME.keys\n end", "docstring": "# Lists all the models that are supported", "url": "https://github.com/IAPark/tiktoken_ruby/blob/ab315180ce105f6e01ed278ff359dfb7f28c7196/lib/tiktoken_ruby.rb#L57-L59", "sha": "ab315180ce105f6e01ed278ff359dfb7f28c7196"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Standard.Rails.Plugin.without_extended_rule_configs", "code": "def without_extended_rule_configs(rules)\n rules.reject { |(name, _)|\n [\"Style/InvertibleUnlessCondition\", \"Lint/SafeNavigationChain\"].include?(name)\n }.to_h\n end", "docstring": "# See: https://github.com/standardrb/standard-rails/issues/25#issuecomment-1881127173", "url": "https://github.com/standardrb/standard-rails/blob/0872d5cf69aba21b125f1a06ad951ce007b6de10/lib/standard/rails/plugin.rb#L55-L59", "sha": "0872d5cf69aba21b125f1a06ad951ce007b6de10"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Standard.Rails.Plugin.without_warnings", "code": "def without_warnings(&blk)\n original_verbose = $VERBOSE\n $VERBOSE = nil\n yield\n ensure\n $VERBOSE = original_verbose\n end", "docstring": "# emitted when we load the cops.", "url": "https://github.com/standardrb/standard-rails/blob/0872d5cf69aba21b125f1a06ad951ce007b6de10/lib/standard/rails/plugin.rb#L85-L91", "sha": "0872d5cf69aba21b125f1a06ad951ce007b6de10"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ActiveRecordETL.Pipeline.attributes", "code": "def attributes\n record.attributes\n end", "docstring": "# @return [Hash] the record's attributes", "url": "https://github.com/hopsoft/universalid/blob/4294d1171b67510c85dc05e0cfd02353adc58614/test/rails_kit/models/active_record_etl.rb#L40-L42", "sha": "4294d1171b67510c85dc05e0cfd02353adc58614"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ActiveRecordETL.Pipeline.loaded_nested_attribute_names", "code": "def loaded_nested_attribute_names\n nested_attribute_names & loaded_has_many_associations_by_name.keys\n end", "docstring": "# @return [Array]", "url": "https://github.com/hopsoft/universalid/blob/4294d1171b67510c85dc05e0cfd02353adc58614/test/rails_kit/models/active_record_etl.rb#L89-L91", "sha": "4294d1171b67510c85dc05e0cfd02353adc58614"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"ActiveRecordETL.Pipeline.parent_attribute_names", "code": "def parent_attribute_names\n record.class.reflections.each_with_object([]) do |(name, reflection), memo|\n memo << reflection.foreign_key if reflection.macro == :belongs_to\n end\n end", "docstring": "# @return [Array]", "url": "https://github.com/hopsoft/universalid/blob/4294d1171b67510c85dc05e0cfd02353adc58614/test/rails_kit/models/active_record_etl.rb#L96-L100", "sha": "4294d1171b67510c85dc05e0cfd02353adc58614"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ActiveRecordETL.Pipeline.transform", "code": "def transform(format: :json, **options)\n case format\n # when :json then extract(**options).to_json\n when :json then Oj.dump extract(**options), symbol_keys: false\n else raise NotImplementedError\n end\n end", "docstring": "# @raise [NotImplementedError] if the specified format is not supported", "url": "https://github.com/hopsoft/universalid/blob/4294d1171b67510c85dc05e0cfd02353adc58614/test/rails_kit/models/active_record_etl.rb#L145-L151", "sha": "4294d1171b67510c85dc05e0cfd02353adc58614"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BasicTest.MessageContainerTest.test_to_h", "code": "def test_to_h\n m = TestMessage.new(:optional_bool => true, :optional_double => -10.100001, :optional_string => 'foo', :repeated_string => ['bar1', 'bar2'], :repeated_msg => [TestMessage2.new(:foo => 100)])\n expected_result = {\n :optional_bool=>true,\n :optional_bytes=>\"\",\n :optional_double=>-10.100001,\n :optional_enum=>:Default,\n :optional_float=>0.0,\n :optional_int32=>0,\n :optional_int64=>0,\n :optional_msg=>nil,\n :optional_string=>\"foo\",\n :optional_uint32=>0,\n :optional_uint64=>0,\n :repeated_bool=>[],\n :repeated_bytes=>[],\n :repeated_double=>[],\n :repeated_enum=>[],\n :repeated_float=>[],\n :repeated_int32=>[],\n :repeated_int64=>[],\n :repeated_msg=>[{:foo => 100}],\n :repeated_string=>[\"bar1\", \"bar2\"],\n :repeated_uint32=>[],\n :repeated_uint64=>[]\n }\n assert_equal expected_result, m.to_h\n\n m = MapMessage.new(\n :map_string_int32 => {\"a\" => 1, \"b\" => 2},\n :map_string_msg => {\"a\" => TestMessage2.new(:foo => 1),\n \"b\" => TestMessage2.new(:foo => 2)},\n :map_string_enum => {\"a\" => :A, \"b\" => :B})\n expected_result = {\n :map_string_int32 => {\"a\" => 1, \"b\" => 2},\n :map_string_msg => {\"a\" => {:foo => 1}, \"b\" => {:foo => 2}},\n :map_string_enum => {\"a\" => :A, \"b\" => :B}\n }\n assert_equal expected_result, m.to_h\n end", "docstring": "#end", "url": "https://github.com/srivatsankrishnan/oss-arch-gym/blob/fab6d1442541b5cdf40daf24e64e63261da2d846/sims/AstraSim/protobuf-3.12.4/ruby/tests/basic.rb#L435-L474", "sha": "fab6d1442541b5cdf40daf24e64e63261da2d846"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.initialize", "code": "def initialize(\n llm:,\n tools: [],\n instructions: nil,\n tool_choice: \"auto\",\n parallel_tool_calls: true,\n messages: [],\n # Callbacks\n add_message_callback: nil,\n tool_execution_callback: nil,\n &block\n )\n unless tools.is_a?(Array) && tools.all? 
{ |tool| tool.class.singleton_class.included_modules.include?(Langchain::ToolDefinition) }\n raise ArgumentError, \"Tools must be an array of objects extending Langchain::ToolDefinition\"\n end\n\n @llm = llm\n @llm_adapter = LLM::Adapter.build(llm)\n\n @add_message_callback = add_message_callback if validate_callback!(\"add_message_callback\", add_message_callback)\n @tool_execution_callback = tool_execution_callback if validate_callback!(\"tool_execution_callback\", tool_execution_callback)\n\n self.messages = messages\n @tools = tools\n @parallel_tool_calls = parallel_tool_calls\n self.tool_choice = tool_choice\n self.instructions = instructions\n @block = block\n @state = :ready\n\n @total_prompt_tokens = 0\n @total_completion_tokens = 0\n @total_tokens = 0\n end", "docstring": "# @param tool_execution_callback [Proc] A callback function (Proc or lambda) that is called right before a tool function is executed", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L40-L73", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.add_messages", "code": "def add_messages(messages:)\n messages.each do |message_hash|\n add_message(**message_hash.slice(:content, :role, :tool_calls, :tool_call_id))\n end\n end", "docstring": "# @return [Array] The messages", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L125-L129", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.run!", "code": "def run!\n run(auto_tool_execution: true)\n end", "docstring": "# @return [Array] The messages", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L151-L153", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.replace_system_message!", "code": "def replace_system_message!(content:)\n messages.delete_if(&:system?)\n return if content.nil?\n\n message = build_message(role: \"system\", content: content)\n messages.unshift(message)\n end", "docstring": "# @return [Array] The messages", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L219-L225", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.validate_tool_choice!", "code": "def validate_tool_choice!(tool_choice)\n allowed_tool_choices = llm_adapter.allowed_tool_choices.concat(available_tool_names)\n unless allowed_tool_choices.include?(tool_choice)\n raise ArgumentError, \"Tool choice must be one of: #{allowed_tool_choices.join(\", \")}\"\n end\n end", "docstring": "# TODO: If tool_choice = \"tool_function_name\" and then tool is removed from the assistant, should we set tool_choice back to \"auto\"?", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L228-L233", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.handle_system_message", "code": "def 
handle_system_message\n Langchain.logger.warn(\"#{self.class} - At least one user message is required after a system message\")\n :completed\n end", "docstring": "# @return [Symbol] The completed state", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L279-L282", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.handle_unexpected_message", "code": "def handle_unexpected_message\n Langchain.logger.error(\"#{self.class} - Unexpected message role encountered: #{messages.last.standard_role}\")\n :failed\n end", "docstring": "# @return [Symbol] The failed state", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L294-L297", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.handle_user_or_tool_message", "code": "def handle_user_or_tool_message\n response = chat_with_llm\n\n add_message(role: response.role, content: response.chat_completion, tool_calls: response.tool_calls)\n record_used_tokens(response.prompt_tokens, response.completion_tokens, response.total_tokens)\n\n set_state_for(response: response)\n end", "docstring": "# @return [Symbol] The next state", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L302-L309", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.run_tools", "code": "def run_tools(tool_calls)\n # Iterate over each function invocation and submit tool output\n tool_calls.each do |tool_call|\n run_tool(tool_call)\n end\n end", "docstring": "# @param tool_calls [Array] The tool calls to run", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant.rb#L354-L359", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::ToolDefinition.ParameterBuilder.build", "code": "def build(&block)\n instance_eval(&block)\n @schema\n end", "docstring": "# @return [Hash] The built schema", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/tool_definition.rb#L152-L155", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::ToolDefinition.ParameterBuilder.validate_parameters", "code": "def validate_parameters(name:, type:, enum:, required:)\n if @parent_type == \"object\"\n if name.nil?\n raise ArgumentError, \"Name must be provided for properties of an object\"\n end\n unless name.is_a?(Symbol)\n raise ArgumentError, \"Invalid name '#{name}'. Name must be a symbol\"\n end\n end\n\n unless VALID_TYPES.include?(type)\n raise ArgumentError, \"Invalid type '#{type}'. Valid types are: #{VALID_TYPES.join(\", \")}\"\n end\n\n unless enum.nil? || enum.is_a?(Array)\n raise ArgumentError, \"Invalid enum '#{enum}'. Enum must be nil or an array\"\n end\n\n unless [true, false].include?(required)\n raise ArgumentError, \"Invalid required '#{required}'. 
Required must be a boolean\"\n end\n end", "docstring": "# @raise [ArgumentError] If any parameter is invalid", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/tool_definition.rb#L208-L229", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.ToolResponse.initialize", "code": "def initialize(content: nil, image_url: nil)\n raise ArgumentError, \"Either content or image_url must be provided\" if content.nil? && image_url.nil?\n\n @content = content\n @image_url = image_url\n end", "docstring": "# @param image_url [String, nil] Optional URL to an image.", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/tool_response.rb#L13-L18", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.LLM.Adapters.Base.tool_role", "code": "def tool_role\n raise NotImplementedError, \"Subclasses must implement tool_role\"\n end", "docstring": "# @return [String] The tool role", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/llm/adapters/base.rb#L56-L58", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.LLM.Adapters.MistralAI.build_message", "code": "def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)\n Messages::MistralAIMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id)\n end", "docstring": "# @return [Messages::MistralAIMessage] The Mistral AI message", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/llm/adapters/mistral_ai.rb#L41-L43", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.LLM.Adapters.MistralAI.extract_tool_call_args", "code": "def extract_tool_call_args(tool_call:)\n tool_call_id = tool_call.dig(\"id\")\n\n function_name = tool_call.dig(\"function\", \"name\")\n tool_name, method_name = function_name.split(\"__\")\n\n tool_arguments = tool_call.dig(\"function\", \"arguments\")\n tool_arguments = if tool_arguments.is_a?(Hash)\n Langchain::Utils::HashTransformer.symbolize_keys(tool_arguments)\n else\n JSON.parse(tool_arguments, symbolize_names: true)\n end\n\n [tool_call_id, tool_name, method_name, tool_arguments]\n end", "docstring": "# @return [Array] The tool call information", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/llm/adapters/mistral_ai.rb#L49-L63", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.LLM.Adapters.MistralAI.available_tool_names", "code": "def available_tool_names(tools)\n build_tools(tools).map { |tool| tool.dig(:function, :name) }\n end", "docstring": "# Get the available tool names for Mistral AI", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/llm/adapters/mistral_ai.rb#L76-L78", "sha": 
"0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.LLM.Adapters.Ollama.build_message", "code": "def build_message(role:, content: nil, image_url: nil, tool_calls: [], tool_call_id: nil)\n Messages::OllamaMessage.new(role: role, content: content, image_url: image_url, tool_calls: tool_calls, tool_call_id: tool_call_id)\n end", "docstring": "# @return [Messages::OllamaMessage] The Ollama message", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/llm/adapters/ollama.rb#L41-L43", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.LLM.Adapters.Ollama.available_tool_names", "code": "def available_tool_names(tools)\n build_tools(tools).map { |tool| tool.dig(:function, :name) }\n end", "docstring": "# Build the tools for the Ollama LLM", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/llm/adapters/ollama.rb#L66-L68", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.Messages.AnthropicMessage.initialize", "code": "def initialize(\n role:,\n content: nil,\n image_url: nil,\n tool_calls: [],\n tool_call_id: nil\n )\n raise ArgumentError, \"Role must be one of #{ROLES.join(\", \")}\" unless ROLES.include?(role)\n raise ArgumentError, \"Tool calls must be an array of hashes\" unless tool_calls.is_a?(Array) && tool_calls.all? { |tool_call| tool_call.is_a?(Hash) }\n\n @role = role\n # Some Tools return content as a JSON hence `.to_s`\n @content = content.to_s\n @image_url = image_url\n @tool_calls = tool_calls\n @tool_call_id = tool_call_id\n end", "docstring": "# @param tool_call_id [String] The ID of the tool call", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/messages/anthropic_message.rb#L21-L37", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.Messages.MistralAIMessage.assistant_hash", "code": "def assistant_hash\n {\n role: \"assistant\",\n content: content,\n tool_calls: tool_calls,\n prefix: false\n }\n end", "docstring": "# @return [Hash] The message as an MistralAI API-compatible hash, with the role as \"assistant\"", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/messages/mistral_ai_message.rb#L82-L89", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.Messages.MistralAIMessage.build_content_array", "code": "def build_content_array\n content_details = []\n\n if content && !content.empty?\n content_details << {\n type: \"text\",\n text: content\n }\n end\n\n if image_url\n content_details << {\n type: \"image_url\",\n image_url: image_url\n }\n end\n\n content_details\n end", "docstring": "# @return [Array] An array of content hashes, with keys :type and :text or :image_url.", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/messages/mistral_ai_message.rb#L121-L139", "sha": 
"0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.Messages.OllamaMessage.system?", "code": "def system?\n role == \"system\"\n end", "docstring": "# @return [Boolean] true/false whether this message are system instructions", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/messages/ollama_message.rb#L67-L69", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Assistant.Messages.OpenAIMessage.tool?", "code": "def tool?\n role == \"tool\"\n end", "docstring": "# @return [Boolean] true/false whether this message is a tool call", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/assistant/messages/openai_message.rb#L81-L83", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Chunker.Markdown.chunks", "code": "def chunks\n splitter = Baran::MarkdownSplitter.new(\n chunk_size: chunk_size,\n chunk_overlap: chunk_overlap\n )\n\n splitter.chunks(text).map do |chunk|\n Langchain::Chunk.new(text: chunk[:text])\n end\n end", "docstring": "# @return [Array]", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/chunker/markdown.rb#L25-L34", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Chunker.RecursiveText.chunks", "code": "def chunks\n splitter = Baran::RecursiveCharacterTextSplitter.new(\n chunk_size: chunk_size,\n chunk_overlap: chunk_overlap,\n separators: separators\n )\n\n splitter.chunks(text).map do |chunk|\n Langchain::Chunk.new(text: chunk[:text])\n end\n end", "docstring": "# @return [Array]", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/chunker/recursive_text.rb#L26-L36", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Evals.Ragas.AnswerRelevance.initialize", "code": "def initialize(llm:, batch_size: 3)\n @llm = llm\n @batch_size = batch_size\n end", "docstring": "# @param batch_size [Integer] Batch size, i.e., number of generated questions to compare to the original question", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/evals/ragas/answer_relevance.rb#L15-L18", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Evals.Ragas.Main.answer_relevance", "code": "def answer_relevance\n @answer_relevance ||= Langchain::Evals::Ragas::AnswerRelevance.new(llm: llm)\n end", "docstring": "# @return [Langchain::Evals::Ragas::AnswerRelevance] Class instance", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/evals/ragas/main.rb#L54-L56", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::LLM.Base.chat", "code": "def chat(...)\n raise NotImplementedError, \"#{self.class.name} does not support chat\"\n end", "docstring": "# @raise 
NotImplementedError if not supported by the LLM", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/llm/base.rb#L48-L50", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::LLM.Base.chat_parameters", "code": "def chat_parameters(params = {})\n @chat_parameters ||= Langchain::LLM::Parameters::Chat.new(\n parameters: params\n )\n end", "docstring": "#", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/llm/base.rb#L81-L85", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::LLM.Cohere.summarize", "code": "def summarize(text:)\n response = client.summarize(text: text)\n response.dig(\"summary\")\n end", "docstring": "# @return [String] The summary", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/llm/cohere.rb#L116-L119", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::LLM.Ollama.initialize", "code": "def initialize(url: \"http://localhost:11434\", api_key: nil, default_options: {})\n depends_on \"faraday\"\n @url = url\n @api_key = api_key\n @defaults = DEFAULTS.merge(default_options)\n chat_parameters.update(\n model: {default: @defaults[:chat_model]},\n temperature: {default: @defaults[:temperature]},\n template: {},\n stream: {default: false},\n response_format: {default: @defaults[:response_format]},\n options: {default: @defaults[:options]}\n )\n chat_parameters.remap(response_format: :format)\n end", "docstring": "#", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/llm/ollama.rb#L40-L54", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::LLM.Ollama.complete", "code": "def complete(\n prompt:,\n model: defaults[:completion_model],\n images: nil,\n format: nil,\n system: nil,\n template: nil,\n context: nil,\n raw: nil,\n mirostat: nil,\n mirostat_eta: nil,\n mirostat_tau: nil,\n num_ctx: nil,\n num_gqa: nil,\n num_gpu: nil,\n num_thread: nil,\n repeat_last_n: nil,\n repeat_penalty: nil,\n temperature: defaults[:temperature],\n seed: nil,\n stop: nil,\n tfs_z: nil,\n num_predict: nil,\n top_k: nil,\n top_p: nil,\n stop_sequences: nil,\n &block\n )\n if stop_sequences\n stop = stop_sequences\n end\n\n parameters = {\n prompt: prompt,\n model: model,\n images: images,\n format: format,\n system: system,\n template: template,\n context: context,\n stream: block_given?, # rubocop:disable Performance/BlockGivenWithExplicitBlock\n raw: raw\n }.compact\n\n llm_parameters = {\n mirostat: mirostat,\n mirostat_eta: mirostat_eta,\n mirostat_tau: mirostat_tau,\n num_ctx: num_ctx,\n num_gqa: num_gqa,\n num_gpu: num_gpu,\n num_thread: num_thread,\n repeat_last_n: repeat_last_n,\n repeat_penalty: repeat_penalty,\n temperature: temperature,\n seed: seed,\n stop: stop,\n tfs_z: tfs_z,\n num_predict: num_predict,\n top_k: top_k,\n top_p: top_p\n }\n\n parameters[:options] = llm_parameters.compact\n responses_stream = []\n\n client.post(\"api/generate\", parameters) do |req|\n req.options.on_data = json_responses_chunk_handler do |parsed_chunk|\n responses_stream << 
parsed_chunk\n\n block&.call(OllamaResponse.new(parsed_chunk, model: parameters[:model]))\n end\n end\n\n generate_final_completion_response(responses_stream, parameters[:model])\n end", "docstring": "#", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/llm/ollama.rb#L81-L156", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395", "code/function": "def complete(\n prompt:,\n model: defaults[:completion_model],\n images: nil,\n format: nil,\n system: nil,\n template: nil,\n context: nil,\n raw: nil,\n mirostat: nil,\n mirostat_eta: nil,\n mirostat_tau: nil,\n num_ctx: nil,\n num_gqa: nil,\n num_gpu: nil,\n num_thread: nil,\n repeat_last_n: nil,\n repeat_penalty: nil,\n temperature: defaults[:temperature],\n seed: nil,\n stop: nil,\n tfs_z: nil,\n num_predict: nil,\n top_k: nil,\n top_p: nil,\n stop_sequences: nil,\n &block\n )\n if stop_sequences\n stop = stop_sequences\n end\n\n parameters = {\n prompt: prompt,\n model: model,\n images: images,\n format: format,\n system: system,\n template: template,\n context: context,\n stream: block_given?, # rubocop:disable Performance/BlockGivenWithExplicitBlock\n raw: raw\n }.compact\n\n llm_parameters = {\n mirostat: mirostat,\n mirostat_eta: mirostat_eta,\n mirostat_tau: mirostat_tau,\n num_ctx: num_ctx,\n num_gqa: num_gqa,\n num_gpu: num_gpu,\n num_thread: num_thread,\n repeat_last_n: repeat_last_n,\n repeat_penalty: repeat_penalty,\n temperature: temperature,\n seed: seed,\n stop: stop,\n tfs_z: tfs_z,\n num_predict: num_predict,\n top_k: top_k,\n top_p: top_p\n }\n\n parameters[:options] = llm_parameters.compact\n responses_stream = []\n\n client.post(\"api/generate\", parameters) do |req|\n req.options.on_data = json_responses_chunk_handler do |parsed_chunk|\n responses_stream << parsed_chunk\n\n block&.call(OllamaResponse.new(parsed_chunk, model: parameters[:model]))\n end\n end\n\n generate_final_completion_response(responses_stream, parameters[:model])\n end"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.LLM.BaseResponse.chat_completion", "code": "def chat_completion\n raise NotImplementedError\n end", "docstring": "#", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/llm/response/base_response.rb#L35-L37", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::OutputParsers.OutputFixingParser.get_format_instructions", "code": "def get_format_instructions\n parser.get_format_instructions\n end", "docstring": "# according to the @schema.", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/output_parsers/output_fixing_parser.rb#L35-L37", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::OutputParsers.StructuredOutputParser.initialize", "code": "def initialize(schema:)\n @schema = validate_schema!(schema)\n end", "docstring": "# @param schema [JSON::Schema] The json schema", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/output_parsers/structured_output_parser.rb#L13-L15", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Processors.Eml.clean_content", "code": 
"def clean_content(content)\n content\n .gsub(/\\[cid:[^\\]]+\\]/, \"\") # Remove embedded image references\n .gsub(URI::DEFAULT_PARSER.make_regexp(%w[http https])) { |match| \"<#{match}>\" } # Format URLs\n .gsub(/\\r\\n?/, \"\\n\") # Normalize line endings to Unix style\n .gsub(/[\\u200B-\\u200D\\uFEFF]/, \"\") # Remove zero width spaces and similar characters\n .gsub(/<\\/?[^>]+>/, \"\") # Remove any HTML tags that might have sneaked in\n end", "docstring": "# Clean and format the extracted content", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/processors/eml.rb#L54-L61", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Processors.Markdown.parse", "code": "def parse(data)\n data.read\n end", "docstring": "# @return [String]", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/processors/markdown.rb#L12-L14", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Prompt.PromptTemplate.initialize", "code": "def initialize(template:, input_variables:, validate_template: true)\n @template = template\n @input_variables = input_variables\n @validate_template = validate_template\n\n validate(template: @template, input_variables: @input_variables) if @validate_template\n end", "docstring": "#", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/prompt/prompt_template.rb#L45-L51", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Tool.Database.describe_table", "code": "def describe_table(table)\n # TODO: There's probably a clear way to do all of this below\n\n primary_key_columns = []\n primary_key_column_count = db.schema(table).count { |column| column[1][:primary_key] == true }\n\n schema = \"CREATE TABLE #{table}(\\n\"\n db.schema(table).each do |column|\n schema << \"#{column[0]} #{column[1][:type]}\"\n if column[1][:primary_key] == true\n schema << \" PRIMARY KEY\" if primary_key_column_count == 1\n else\n primary_key_columns << column[0]\n end\n schema << \" COMMENT '#{column[1][:comment]}'\" if column[1][:comment]\n schema << \",\\n\" unless column == db.schema(table).last && primary_key_column_count == 1\n end\n if primary_key_column_count > 1\n schema << \"PRIMARY KEY (#{primary_key_columns.join(\",\")})\"\n end\n db.foreign_key_list(table).each do |fk|\n schema << \",\\n\" if fk == db.foreign_key_list(table).first\n schema << \"FOREIGN KEY (#{fk[:columns]&.first}) REFERENCES #{fk[:table]}(#{fk[:key]&.first})\"\n schema << \",\\n\" unless fk == db.foreign_key_list(table).last\n end\n schema << \");\\n\"\n\n tool_response(content: schema)\n end", "docstring": "# @return [Langchain::Tool::Response] The schema for the table", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/tool/database.rb#L107-L135", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Tool.Tavily.search", "code": "def search(\n query:,\n search_depth: \"basic\",\n include_images: false,\n include_answer: false,\n include_raw_content: false,\n max_results: 5,\n include_domains: [],\n 
exclude_domains: []\n )\n uri = URI(\"https://api.tavily.com/search\")\n request = Net::HTTP::Post.new(uri)\n request.content_type = \"application/json\"\n request.body = {\n api_key: @api_key,\n query: query,\n search_depth: search_depth,\n include_images: include_images,\n include_answer: include_answer,\n include_raw_content: include_raw_content,\n max_results: max_results,\n include_domains: include_domains,\n exclude_domains: exclude_domains\n }.to_json\n\n response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == \"https\") do |http|\n http.request(request)\n end\n tool_response(content: response.body)\n end", "docstring": "# @return [Langchain::Tool::Response] The search results in JSON format.", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/tool/tavily.rb#L45-L74", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain.Utils.CosineSimilarity.calculate_similarity", "code": "def calculate_similarity\n return nil unless vector_a.is_a? Array\n return nil unless vector_b.is_a? Array\n return nil if vector_a.size != vector_b.size\n\n dot_product = 0\n vector_a.zip(vector_b).each do |v1i, v2i|\n dot_product += v1i * v2i\n end\n\n a = vector_a.map { |n| n**2 }.reduce(:+)\n b = vector_b.map { |n| n**2 }.reduce(:+)\n\n dot_product / (Math.sqrt(a) * Math.sqrt(b))\n end", "docstring": "# @return [Float] The cosine similarity between the two vectors", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/utils/cosine_similarity.rb#L17-L31", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Base.update_texts", "code": "def update_texts(...)\n raise NotImplementedError, \"#{self.class.name} does not support updating texts\"\n end", "docstring": "# Method supported by Vectorsearch DB to update a list of texts to the index", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/base.rb#L123-L125", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Base.similarity_search_by_vector", "code": "def similarity_search_by_vector(...)\n raise NotImplementedError, \"#{self.class.name} does not support similarity search by vector\"\n end", "docstring": "# You must generate your own vector using the same LLM that generated the embeddings stored in the Vectorsearch DB.", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/base.rb#L150-L152", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Base.ask", "code": "def ask(...)\n raise NotImplementedError, \"#{self.class.name} does not support asking questions\"\n end", "docstring": "# Method supported by Vectorsearch DB to answer a question given a context (data) pulled from your Vectorsearch DB.", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/base.rb#L155-L157", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": 
"", "func_name": "Langchain::Vectorsearch.Chroma.similarity_search", "code": "def similarity_search(\n query:,\n k: 4\n )\n embedding = llm.embed(text: query).embedding\n\n similarity_search_by_vector(\n embedding: embedding,\n k: k\n )\n end", "docstring": "# @return [Chroma::Resources::Embedding] The response from the server", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/chroma.rb#L94-L104", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Chroma.ask", "code": "def ask(question:, k: 4, &block)\n search_results = similarity_search(query: question, k: k)\n\n context = search_results.map do |result|\n result.document\n end\n\n context = context.join(\"\\n---\\n\")\n\n prompt = generate_rag_prompt(question: question, context: context)\n\n messages = [{role: \"user\", content: prompt}]\n response = llm.chat(messages: messages, &block)\n\n response.context = context\n response\n end", "docstring": "# @return [String] The answer to the question", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/chroma.rb#L128-L144", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Elasticsearch.delete_default_schema", "code": "def delete_default_schema\n es_client.indices.delete(\n index: index_name\n )\n end", "docstring": "# @return [Elasticsearch::Response] Index deletion", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/elasticsearch.rb#L100-L104", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Elasticsearch.similarity_search_by_vector", "code": "def similarity_search_by_vector(embedding: [], k: 10, query: {})\n if embedding.empty? 
&& query.empty?\n raise \"Either embedding or query should be passed as an argument\"\n end\n\n query = default_query(embedding) if query.empty?\n\n es_client.search(body: {query: query, size: k}).body\n end", "docstring": "# @return [Elasticsearch::Response] The response from the server", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/elasticsearch.rb#L186-L194", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Epsilla.add_texts", "code": "def add_texts(texts:, ids: nil)\n validated_ids = ids\n if ids.nil?\n validated_ids = texts.map { SecureRandom.uuid }\n elsif ids.length != texts.length\n raise \"The number of ids must match the number of texts\"\n end\n\n data = texts.map.with_index do |text, idx|\n {Doc: text, Embedding: llm.embed(text: text).embedding, ID: validated_ids[idx]}\n end\n\n status_code, response = @client.database.insert(@table_name, data)\n raise \"Failed to insert texts: #{response}\" if status_code != 200\n JSON.parse(response)\n end", "docstring": "# @param ids [Array] The unique ids to add to the index, in the same order as the texts; if nil, it will be random uuids", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/epsilla.rb#L83-L98", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Milvus.get_default_schema", "code": "def get_default_schema\n client.collections.describe(collection_name: index_name)\n end", "docstring": "# @return [Hash] The response from the server", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/milvus.rb#L102-L104", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Pgvector.remove_texts", "code": "def remove_texts(ids:)\n @db[table_name.to_sym].where(id: ids).delete\n end", "docstring": "# @return [Integer] The number of texts removed from the index", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/pgvector.rb#L96-L98", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Qdrant.find", "code": "def find(ids: [])\n client.points.get_all(\n collection_name: index_name,\n ids: ids,\n with_payload: true,\n with_vector: true\n )\n end", "docstring": "# @return [Hash] The response from the server", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/qdrant.rb#L36-L43", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Qdrant.remove_texts", "code": "def remove_texts(ids:)\n client.points.delete(\n collection_name: index_name,\n points: ids\n )\n end", "docstring": "# @return [Hash] The response from the server", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/qdrant.rb#L71-L76", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", 
"dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Qdrant.get_default_schema", "code": "def get_default_schema\n client.collections.get(collection_name: index_name)\n end", "docstring": "# @return [Hash] The response from the server", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/qdrant.rb#L80-L82", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Qdrant.similarity_search_by_vector", "code": "def similarity_search_by_vector(\n embedding:,\n k: 4\n )\n response = client.points.search(\n collection_name: index_name,\n limit: k,\n vector: embedding,\n with_payload: true,\n with_vector: true\n )\n response.dig(\"result\")\n end", "docstring": "# @return [Hash] The response from the server", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/qdrant.rb#L122-L134", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Weaviate.similarity_search_by_vector", "code": "def similarity_search_by_vector(embedding:, k: 4)\n near_vector = \"{ vector: #{embedding} }\"\n\n client.query.get(\n class_name: index_name,\n near_vector: near_vector,\n limit: k.to_s,\n fields: \"__id content _additional { id }\"\n )\n end", "docstring": "# @return [Hash] The search results", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/weaviate.rb#L131-L140", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Langchain::Vectorsearch.Weaviate.ask", "code": "def ask(question:, k: 4, &block)\n search_results = similarity_search(query: question, k: k)\n\n context = search_results.map do |result|\n result.dig(\"content\").to_s\n end\n context = context.join(\"\\n---\\n\")\n\n prompt = generate_rag_prompt(question: question, context: context)\n\n messages = [{role: \"user\", content: prompt}]\n response = llm.chat(messages: messages, &block)\n\n response.context = context\n response\n end", "docstring": "# @return [Hash] The answer", "url": "https://github.com/patterns-ai-core/langchainrb/blob/0d9f46ee75cc1556c753d60bf574dc3956e23395/lib/langchain/vectorsearch/weaviate.rb#L147-L162", "sha": "0d9f46ee75cc1556c753d60bf574dc3956e23395"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExampleService.BulkDeleteAsync", "code": "public async Task BulkDeleteAsync(BulkReq req)\n {\n req.ThrowIfInvalid();\n var ret = 0;\n\n // ReSharper disable once LoopCanBeConvertedToQuery\n foreach (var item in req.Items) {\n ret += await DeleteAsync(item).ConfigureAwait(false);\n }\n\n return ret;\n }", "docstring": "/// ", "url": "https://github.com/nsnail/NetAdmin/blob/72cc3c42fded6f2d5a8cc43244746493c346d8f8/src/backend/NetAdmin/NetAdmin.Application/Services/Tpl/ExampleService.cs#L15-L26", "sha": "72cc3c42fded6f2d5a8cc43244746493c346d8f8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IApplicationBuilderExtensions.UseEndpoints", "code": "public static IApplicationBuilder UseEndpoints(this IApplicationBuilder me)\n {\n return me.UseEndpoints(endpoints => {\n _ = endpoints.MapControllers();\n #if !DEBUG\n _ = 
endpoints.MapMetrics();\n #endif\n });\n }", "docstring": "/// ", "url": "https://github.com/nsnail/NetAdmin/blob/72cc3c42fded6f2d5a8cc43244746493c346d8f8/src/backend/NetAdmin/NetAdmin.Host/Extensions/IApplicationBuilderExtensions.cs#L22-L30", "sha": "72cc3c42fded6f2d5a8cc43244746493c346d8f8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RemoveDeprecatedServerHttpOptions.BuildTargetModel", "code": "protected override void BuildTargetModel(ModelBuilder modelBuilder)\n {\n#pragma warning disable 612, 618\n modelBuilder\n .HasAnnotation(\"ProductVersion\", \"7.0.13\")\n .HasAnnotation(\"Proxies:ChangeTracking\", false)\n .HasAnnotation(\"Proxies:CheckEquality\", false)\n .HasAnnotation(\"Proxies:LazyLoading\", true);\n\n modelBuilder.Entity(\"CategoryGame\", b =>\n {\n b.Property(\"CategoriesId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GamesId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"CategoriesId\", \"GamesId\");\n\n b.HasIndex(\"GamesId\");\n\n b.ToTable(\"CategoryGame\");\n });\n\n modelBuilder.Entity(\"GameDeveloper\", b =>\n {\n b.Property(\"DeveloperId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"DeveloperId\", \"GameId\");\n\n b.HasIndex(\"GameId\");\n\n b.ToTable(\"GameDeveloper\");\n });\n\n modelBuilder.Entity(\"GameGenre\", b =>\n {\n b.Property(\"GamesId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GenresId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GamesId\", \"GenresId\");\n\n b.HasIndex(\"GenresId\");\n\n b.ToTable(\"GameGenre\");\n });\n\n modelBuilder.Entity(\"GamePublisher\", b =>\n {\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PublisherId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GameId\", \"PublisherId\");\n\n b.HasIndex(\"PublisherId\");\n\n b.ToTable(\"GamePublisher\");\n });\n\n modelBuilder.Entity(\"GameRedistributable\", b =>\n {\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RedistributableId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GameId\", \"RedistributableId\");\n\n b.HasIndex(\"RedistributableId\");\n\n b.ToTable(\"GameRedistributable\");\n });\n\n modelBuilder.Entity(\"GameTag\", b =>\n {\n b.Property(\"GamesId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"TagsId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GamesId\", \"TagsId\");\n\n b.HasIndex(\"TagsId\");\n\n b.ToTable(\"GameTag\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Action\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Arguments\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PrimaryAction\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"SortOrder\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"WorkingDirectory\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Actions\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Archive\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n 
b.Property(\"Changelog\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CompressedSize\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"LastVersionId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ObjectKey\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RedistributableId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UncompressedSize\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Version\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"LastVersionId\");\n\n b.HasIndex(\"RedistributableId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Archive\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Category\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ParentId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"ParentId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Categories\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Company\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Companies\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Game\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"DirectoryName\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"IGDBId\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Notes\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ReleasedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Singleplayer\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"SortTitle\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Title\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ValidKeyRegex\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Games\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.GameSave\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n 
.HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"GameSaves\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Genre\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Genres\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Key\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"AllocationMethod\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ClaimedByComputerName\")\n .HasMaxLength(255)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimedByIpv4Address\")\n .HasMaxLength(15)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimedByMacAddress\")\n .HasMaxLength(17)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimedByUserId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Value\")\n .IsRequired()\n .HasMaxLength(255)\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"ClaimedByUserId\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Keys\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Media\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"FileId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"MimeType\")\n .IsRequired()\n .HasMaxLength(255)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"SourceUrl\")\n .IsRequired()\n .HasMaxLength(2048)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Media\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.MultiplayerMode\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"MaxPlayers\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"MinPlayers\")\n .HasColumnType(\"INTEGER\");\n\n 
b.Property(\"NetworkProtocol\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Spectators\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"MultiplayerModes\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Redistributable\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Notes\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Redistributables\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Role\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ConcurrencyStamp\")\n .IsConcurrencyToken()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"NormalizedName\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"NormalizedName\")\n .IsUnique()\n .HasDatabaseName(\"RoleNameIndex\");\n\n b.ToTable(\"AspNetRoles\", (string)null);\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.SavePath\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"SavePaths\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Script\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Contents\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RedistributableId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RequiresAdmin\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"RedistributableId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Scripts\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Server\", b =>\n {\n b.Property(\"Id\")\n 
.ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Arguments\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Autostart\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"AutostartDelay\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"OnStartScriptPath\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"OnStopScriptPath\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UseShellExecute\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"WorkingDirectory\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Servers\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.ServerConsole\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Host\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Password\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Port\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ServerId\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ServerId1\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"ServerId\");\n\n b.HasIndex(\"ServerId1\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"ServerConsoles\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.ServerHttpPath\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"LocalPath\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ServerId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ServerId1\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"ServerId\");\n\n b.HasIndex(\"ServerId1\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"ServerHttpPath\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Tag\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n 
b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Tags\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.User\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"AccessFailedCount\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Alias\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Approved\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ApprovedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ConcurrencyStamp\")\n .IsConcurrencyToken()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Email\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"EmailConfirmed\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"LockoutEnabled\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"LockoutEnd\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"NormalizedEmail\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"NormalizedUserName\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PasswordHash\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PhoneNumber\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PhoneNumberConfirmed\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"RefreshToken\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RefreshTokenExpiration\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"SecurityStamp\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"TwoFactorEnabled\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UserName\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"NormalizedEmail\")\n .HasDatabaseName(\"EmailIndex\");\n\n b.HasIndex(\"NormalizedUserName\")\n .IsUnique()\n .HasDatabaseName(\"UserNameIndex\");\n\n b.ToTable(\"AspNetUsers\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityRoleClaim\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ClaimType\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimValue\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RoleId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"RoleId\");\n\n b.ToTable(\"AspNetRoleClaims\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserClaim\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ClaimType\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimValue\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"AspNetUserClaims\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserLogin\", b =>\n {\n b.Property(\"LoginProvider\")\n .HasMaxLength(128)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ProviderKey\")\n .HasMaxLength(128)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ProviderDisplayName\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"LoginProvider\", \"ProviderKey\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"AspNetUserLogins\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserRole\", b =>\n {\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RoleId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"UserId\", \"RoleId\");\n\n b.HasIndex(\"RoleId\");\n\n b.ToTable(\"AspNetUserRoles\", (string)null);\n });\n\n 
modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserToken\", b =>\n {\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"LoginProvider\")\n .HasMaxLength(128)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .HasMaxLength(128)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Value\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"UserId\", \"LoginProvider\", \"Name\");\n\n b.ToTable(\"AspNetUserTokens\", (string)null);\n });\n\n modelBuilder.Entity(\"CategoryGame\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.Category\", null)\n .WithMany()\n .HasForeignKey(\"CategoriesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GamesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GameDeveloper\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.Company\", null)\n .WithMany()\n .HasForeignKey(\"DeveloperId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GameGenre\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GamesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.Genre\", null)\n .WithMany()\n .HasForeignKey(\"GenresId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GamePublisher\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.Company\", null)\n .WithMany()\n .HasForeignKey(\"PublisherId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GameRedistributable\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.Redistributable\", null)\n .WithMany()\n .HasForeignKey(\"RedistributableId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GameTag\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GamesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.Tag\", null)\n .WithMany()\n .HasForeignKey(\"TagsId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Action\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.Game\", \"Game\")\n .WithMany(\"Actions\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Archive\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.Game\", \"Game\")\n .WithMany(\"Archives\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n 
b.HasOne(\"LANCommander.Data.Models.Archive\", \"LastVersion\")\n .WithMany()\n .HasForeignKey(\"LastVersionId\");\n\n b.HasOne(\"LANCommander.Data.Models.Redistributable\", \"Redistributable\")\n .WithMany(\"Archives\")\n .HasForeignKey(\"RedistributableId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"LastVersion\");\n\n b.Navigation(\"Redistributable\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Category\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.Category\", \"Parent\")\n .WithMany(\"Children\")\n .HasForeignKey(\"ParentId\");\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Parent\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Company\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Game\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.GameSave\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.Game\", \"Game\")\n .WithMany(\"GameSaves\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.NoAction)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"User\")\n .WithMany(\"GameSaves\")\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n\n b.Navigation(\"User\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Genre\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Key\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"ClaimedByUser\")\n .WithMany()\n .HasForeignKey(\"ClaimedByUserId\");\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.Game\", \"Game\")\n .WithMany(\"Keys\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n 
b.Navigation(\"ClaimedByUser\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Media\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.Game\", \"Game\")\n .WithMany(\"Media\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.MultiplayerMode\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.Game\", \"Game\")\n .WithMany(\"MultiplayerModes\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Redistributable\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.SavePath\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.Game\", \"Game\")\n .WithMany(\"SavePaths\")\n .HasForeignKey(\"GameId\");\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Script\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.Game\", \"Game\")\n .WithMany(\"Scripts\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Data.Models.Redistributable\", \"Redistributable\")\n .WithMany(\"Scripts\")\n .HasForeignKey(\"RedistributableId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"Redistributable\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Server\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.Game\", \"Game\")\n .WithMany(\"Servers\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.NoAction);\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.ServerConsole\", b =>\n {\n 
b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.Server\", \"Server\")\n .WithMany()\n .HasForeignKey(\"ServerId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.Server\", null)\n .WithMany(\"ServerConsoles\")\n .HasForeignKey(\"ServerId1\");\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Server\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.ServerHttpPath\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.Server\", \"Server\")\n .WithMany()\n .HasForeignKey(\"ServerId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.Server\", null)\n .WithMany(\"HttpPaths\")\n .HasForeignKey(\"ServerId1\");\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Server\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Tag\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityRoleClaim\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.Role\", null)\n .WithMany()\n .HasForeignKey(\"RoleId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserClaim\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserLogin\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserRole\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.Role\", null)\n .WithMany()\n .HasForeignKey(\"RoleId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Data.Models.User\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserToken\", b =>\n {\n b.HasOne(\"LANCommander.Data.Models.User\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Category\", b =>\n {\n b.Navigation(\"Children\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Game\", b =>\n {\n b.Navigation(\"Actions\");\n\n b.Navigation(\"Archives\");\n\n b.Navigation(\"GameSaves\");\n\n b.Navigation(\"Keys\");\n\n b.Navigation(\"Media\");\n\n b.Navigation(\"MultiplayerModes\");\n\n b.Navigation(\"SavePaths\");\n\n b.Navigation(\"Scripts\");\n\n b.Navigation(\"Servers\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Redistributable\", b =>\n {\n 
b.Navigation(\"Archives\");\n\n b.Navigation(\"Scripts\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.Server\", b =>\n {\n b.Navigation(\"HttpPaths\");\n\n b.Navigation(\"ServerConsoles\");\n });\n\n modelBuilder.Entity(\"LANCommander.Data.Models.User\", b =>\n {\n b.Navigation(\"GameSaves\");\n });\n#pragma warning restore 612, 618\n }", "docstring": "/// ", "url": "https://github.com/LANCommander/LANCommander/blob/51bc43ac93e3008391a2c78f958cbfc1d108bd80/LANCommander.Server.Data.SQLite/Migrations/20231105210731_RemoveDeprecatedServerHttpOptions.Designer.cs#L18-L1689", "sha": "51bc43ac93e3008391a2c78f958cbfc1d108bd80"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AddGameCustomFields.BuildTargetModel", "code": "protected override void BuildTargetModel(ModelBuilder modelBuilder)\n {\n#pragma warning disable 612, 618\n modelBuilder.HasAnnotation(\"ProductVersion\", \"8.0.10\");\n\n modelBuilder.Entity(\"CategoryGame\", b =>\n {\n b.Property(\"CategoriesId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GamesId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"CategoriesId\", \"GamesId\");\n\n b.HasIndex(\"GamesId\");\n\n b.ToTable(\"CategoryGame\");\n });\n\n modelBuilder.Entity(\"CollectionGame\", b =>\n {\n b.Property(\"CollectionId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"CollectionId\", \"GameId\");\n\n b.HasIndex(\"GameId\");\n\n b.ToTable(\"CollectionGame\");\n });\n\n modelBuilder.Entity(\"GameDeveloper\", b =>\n {\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"DeveloperId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GameId\", \"DeveloperId\");\n\n b.HasIndex(\"DeveloperId\");\n\n b.ToTable(\"GameDeveloper\");\n });\n\n modelBuilder.Entity(\"GameGenre\", b =>\n {\n b.Property(\"GamesId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GenresId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GamesId\", \"GenresId\");\n\n b.HasIndex(\"GenresId\");\n\n b.ToTable(\"GameGenre\");\n });\n\n modelBuilder.Entity(\"GamePlatform\", b =>\n {\n b.Property(\"GamesId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PlatformsId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GamesId\", \"PlatformsId\");\n\n b.HasIndex(\"PlatformsId\");\n\n b.ToTable(\"GamePlatform\");\n });\n\n modelBuilder.Entity(\"GamePublisher\", b =>\n {\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PublisherId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GameId\", \"PublisherId\");\n\n b.HasIndex(\"PublisherId\");\n\n b.ToTable(\"GamePublisher\");\n });\n\n modelBuilder.Entity(\"GameRedistributable\", b =>\n {\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RedistributableId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GameId\", \"RedistributableId\");\n\n b.HasIndex(\"RedistributableId\");\n\n b.ToTable(\"GameRedistributable\");\n });\n\n modelBuilder.Entity(\"GameTag\", b =>\n {\n b.Property(\"GamesId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"TagsId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GamesId\", \"TagsId\");\n\n b.HasIndex(\"TagsId\");\n\n b.ToTable(\"GameTag\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Action\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Arguments\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n 
.HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PrimaryAction\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ServerId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"SortOrder\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"WorkingDirectory\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"ServerId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Actions\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Archive\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Changelog\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CompressedSize\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"LastVersionId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ObjectKey\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RedistributableId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"StorageLocationId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UncompressedSize\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Version\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"LastVersionId\");\n\n b.HasIndex(\"RedistributableId\");\n\n b.HasIndex(\"StorageLocationId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Archive\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Category\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ParentId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"ParentId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Categories\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Collection\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Collections\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Company\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n 
b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Companies\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Engine\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Engines\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Game\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"BaseGameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"DirectoryName\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"EngineId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"IGDBId\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"KeyAllocationMethod\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Notes\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ReleasedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Singleplayer\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"SortTitle\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Title\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ValidKeyRegex\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"BaseGameId\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"EngineId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Games\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.GameCustomField\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasMaxLength(64)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Value\")\n .IsRequired()\n .HasMaxLength(1024)\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"GameCustomField\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.GameSave\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Size\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"StorageLocationId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n 
b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"StorageLocationId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"GameSaves\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Genre\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Genres\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Issue\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ResolvedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ResolvedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"ResolvedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Issues\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Key\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"AllocationMethod\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ClaimedByComputerName\")\n .HasMaxLength(255)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimedByIpv4Address\")\n .HasMaxLength(15)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimedByMacAddress\")\n .HasMaxLength(17)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimedByUserId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Value\")\n .IsRequired()\n .HasMaxLength(255)\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"ClaimedByUserId\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Keys\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Library\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.HasIndex(\"UserId\")\n .IsUnique();\n\n b.ToTable(\"Libraries\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Media\", b =>\n {\n b.Property(\"Id\")\n 
.ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Crc32\")\n .IsRequired()\n .HasMaxLength(8)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"FileId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"MimeType\")\n .HasMaxLength(255)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .HasMaxLength(64)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"SourceUrl\")\n .HasMaxLength(2048)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"StorageLocationId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ThumbnailId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"StorageLocationId\");\n\n b.HasIndex(\"ThumbnailId\")\n .IsUnique();\n\n b.HasIndex(\"UpdatedById\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"Media\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.MultiplayerMode\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"MaxPlayers\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"MinPlayers\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"NetworkProtocol\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Spectators\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"MultiplayerModes\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Page\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Contents\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ParentId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Route\")\n .IsRequired()\n .HasMaxLength(2048)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Slug\")\n .IsRequired()\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"SortOrder\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Title\")\n .IsRequired()\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"ParentId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Pages\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Platform\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n 
b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Platforms\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.PlaySession\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"End\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Start\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"PlaySessions\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Redistributable\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Notes\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Redistributables\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Role\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ConcurrencyStamp\")\n .IsConcurrencyToken()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"NormalizedName\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"NormalizedName\")\n .IsUnique()\n .HasDatabaseName(\"RoleNameIndex\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Roles\", (string)null);\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.SavePath\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"IsRegex\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"WorkingDirectory\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"SavePaths\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Script\", b =>\n {\n b.Property(\"Id\")\n 
.ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Contents\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RedistributableId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RequiresAdmin\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ServerId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"RedistributableId\");\n\n b.HasIndex(\"ServerId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Scripts\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Server\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Arguments\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Autostart\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"AutostartDelay\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"AutostartMethod\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Host\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"OnStartScriptPath\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"OnStopScriptPath\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Port\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ProcessTerminationMethod\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UseShellExecute\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"WorkingDirectory\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Servers\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.ServerConsole\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Host\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Password\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Port\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ServerId\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ServerId1\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"ServerId\");\n\n b.HasIndex(\"ServerId1\");\n\n 
b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"ServerConsoles\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.ServerHttpPath\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"LocalPath\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ServerId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ServerId1\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"ServerId\");\n\n b.HasIndex(\"ServerId1\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"ServerHttpPath\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.StorageLocation\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Default\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"StorageLocations\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Tag\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Tags\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.User\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"AccessFailedCount\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Alias\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Approved\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ApprovedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ConcurrencyStamp\")\n .IsConcurrencyToken()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Email\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"EmailConfirmed\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"LockoutEnabled\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"LockoutEnd\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"NormalizedEmail\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"NormalizedUserName\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PasswordHash\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PhoneNumber\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PhoneNumberConfirmed\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"RefreshToken\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RefreshTokenExpiration\")\n 
.HasColumnType(\"TEXT\");\n\n b.Property(\"SecurityStamp\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"TwoFactorEnabled\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserName\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"NormalizedEmail\")\n .HasDatabaseName(\"EmailIndex\");\n\n b.HasIndex(\"NormalizedUserName\")\n .IsUnique()\n .HasDatabaseName(\"UserNameIndex\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Users\", (string)null);\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.UserCustomField\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasMaxLength(64)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Value\")\n .IsRequired()\n .HasMaxLength(1024)\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"UserCustomField\");\n });\n\n modelBuilder.Entity(\"LibraryGame\", b =>\n {\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"LibraryId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GameId\", \"LibraryId\");\n\n b.HasIndex(\"LibraryId\");\n\n b.ToTable(\"LibraryGame\");\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityRoleClaim\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ClaimType\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimValue\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RoleId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"RoleId\");\n\n b.ToTable(\"RoleClaims\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserClaim\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ClaimType\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimValue\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"UserClaims\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserLogin\", b =>\n {\n b.Property(\"LoginProvider\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ProviderKey\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ProviderDisplayName\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"LoginProvider\", \"ProviderKey\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"UserLogins\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserRole\", b =>\n {\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RoleId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"UserId\", \"RoleId\");\n\n b.HasIndex(\"RoleId\");\n\n b.ToTable(\"UserRoles\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserToken\", b =>\n {\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"LoginProvider\")\n 
.HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Value\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"UserId\", \"LoginProvider\", \"Name\");\n\n b.ToTable(\"UserTokens\", (string)null);\n });\n\n modelBuilder.Entity(\"PageGame\", b =>\n {\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PageId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GameId\", \"PageId\");\n\n b.HasIndex(\"PageId\");\n\n b.ToTable(\"PageGame\");\n });\n\n modelBuilder.Entity(\"PageRedistributable\", b =>\n {\n b.Property(\"PageId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RedistributableId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"PageId\", \"RedistributableId\");\n\n b.HasIndex(\"RedistributableId\");\n\n b.ToTable(\"PageRedistributable\");\n });\n\n modelBuilder.Entity(\"PageServer\", b =>\n {\n b.Property(\"PageId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ServerId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"PageId\", \"ServerId\");\n\n b.HasIndex(\"ServerId\");\n\n b.ToTable(\"PageServer\");\n });\n\n modelBuilder.Entity(\"RoleCollection\", b =>\n {\n b.Property(\"CollectionId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RoleId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"CollectionId\", \"RoleId\");\n\n b.HasIndex(\"RoleId\");\n\n b.ToTable(\"RoleCollection\");\n });\n\n modelBuilder.Entity(\"CategoryGame\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Category\", null)\n .WithMany()\n .HasForeignKey(\"CategoriesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GamesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"CollectionGame\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Collection\", null)\n .WithMany()\n .HasForeignKey(\"CollectionId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GameDeveloper\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Company\", null)\n .WithMany()\n .HasForeignKey(\"DeveloperId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GameGenre\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GamesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Genre\", null)\n .WithMany()\n .HasForeignKey(\"GenresId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GamePlatform\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GamesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Platform\", null)\n .WithMany()\n .HasForeignKey(\"PlatformsId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GamePublisher\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Company\", null)\n .WithMany()\n 
.HasForeignKey(\"PublisherId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GameRedistributable\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Redistributable\", null)\n .WithMany()\n .HasForeignKey(\"RedistributableId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GameTag\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GamesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Tag\", null)\n .WithMany()\n .HasForeignKey(\"TagsId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Action\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Actions\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", \"Server\")\n .WithMany(\"Actions\")\n .HasForeignKey(\"ServerId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"Server\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Archive\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Archives\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Archive\", \"LastVersion\")\n .WithMany()\n .HasForeignKey(\"LastVersionId\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.Redistributable\", \"Redistributable\")\n .WithMany(\"Archives\")\n .HasForeignKey(\"RedistributableId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.StorageLocation\", \"StorageLocation\")\n .WithMany(\"Archives\")\n .HasForeignKey(\"StorageLocationId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"LastVersion\");\n\n b.Navigation(\"Redistributable\");\n\n b.Navigation(\"StorageLocation\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Category\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Category\", \"Parent\")\n .WithMany(\"Children\")\n .HasForeignKey(\"ParentId\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Parent\");\n\n 
b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Collection\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Company\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Engine\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Game\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"BaseGame\")\n .WithMany(\"DependentGames\")\n .HasForeignKey(\"BaseGameId\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Engine\", \"Engine\")\n .WithMany(\"Games\")\n .HasForeignKey(\"EngineId\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"BaseGame\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Engine\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.GameCustomField\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"CustomFields\")\n .HasForeignKey(\"GameId\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.GameSave\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"GameSaves\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.StorageLocation\", \"StorageLocation\")\n .WithMany(\"GameSaves\")\n .HasForeignKey(\"StorageLocationId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"User\")\n 
.WithMany(\"GameSaves\")\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"StorageLocation\");\n\n b.Navigation(\"UpdatedBy\");\n\n b.Navigation(\"User\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Genre\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Issue\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Issues\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"ResolvedBy\")\n .WithMany()\n .HasForeignKey(\"ResolvedById\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"ResolvedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Key\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"ClaimedByUser\")\n .WithMany()\n .HasForeignKey(\"ClaimedByUserId\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Keys\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"ClaimedByUser\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Library\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"User\")\n .WithOne(\"Library\")\n .HasForeignKey(\"LANCommander.Server.Data.Models.Library\", \"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n\n b.Navigation(\"User\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Media\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Media\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.StorageLocation\", \"StorageLocation\")\n .WithMany(\"Media\")\n 
.HasForeignKey(\"StorageLocationId\")\n .OnDelete(DeleteBehavior.SetNull)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Media\", \"Thumbnail\")\n .WithOne(\"Parent\")\n .HasForeignKey(\"LANCommander.Server.Data.Models.Media\", \"ThumbnailId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"User\")\n .WithMany(\"Media\")\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"StorageLocation\");\n\n b.Navigation(\"Thumbnail\");\n\n b.Navigation(\"UpdatedBy\");\n\n b.Navigation(\"User\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.MultiplayerMode\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"MultiplayerModes\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Page\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Page\", \"Parent\")\n .WithMany(\"Children\")\n .HasForeignKey(\"ParentId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Parent\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Platform\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.PlaySession\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"PlaySessions\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"User\")\n .WithMany(\"PlaySessions\")\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n\n b.Navigation(\"User\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Redistributable\", b =>\n {\n 
b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Role\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.SavePath\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"SavePaths\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Script\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Scripts\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Redistributable\", \"Redistributable\")\n .WithMany(\"Scripts\")\n .HasForeignKey(\"RedistributableId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", \"Server\")\n .WithMany(\"Scripts\")\n .HasForeignKey(\"ServerId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"Redistributable\");\n\n b.Navigation(\"Server\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Server\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Servers\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.ServerConsole\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", \"Server\")\n .WithMany()\n .HasForeignKey(\"ServerId\")\n 
.OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", null)\n .WithMany(\"ServerConsoles\")\n .HasForeignKey(\"ServerId1\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Server\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.ServerHttpPath\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", \"Server\")\n .WithMany()\n .HasForeignKey(\"ServerId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", null)\n .WithMany(\"HttpPaths\")\n .HasForeignKey(\"ServerId1\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Server\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.StorageLocation\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Tag\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.User\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.UserCustomField\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"User\")\n .WithMany(\"CustomFields\")\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n\n b.Navigation(\"User\");\n });\n\n modelBuilder.Entity(\"LibraryGame\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Library\", null)\n .WithMany()\n .HasForeignKey(\"LibraryId\")\n .OnDelete(DeleteBehavior.Cascade)\n 
.IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityRoleClaim\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Role\", null)\n .WithMany()\n .HasForeignKey(\"RoleId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserClaim\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserLogin\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserRole\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Role\", null)\n .WithMany()\n .HasForeignKey(\"RoleId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserToken\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"PageGame\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Page\", null)\n .WithMany()\n .HasForeignKey(\"PageId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"PageRedistributable\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Page\", null)\n .WithMany()\n .HasForeignKey(\"PageId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Redistributable\", null)\n .WithMany()\n .HasForeignKey(\"RedistributableId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"PageServer\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Page\", null)\n .WithMany()\n .HasForeignKey(\"PageId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", null)\n .WithMany()\n .HasForeignKey(\"ServerId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"RoleCollection\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Collection\", null)\n .WithMany()\n .HasForeignKey(\"CollectionId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Role\", null)\n .WithMany()\n .HasForeignKey(\"RoleId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Category\", b =>\n {\n b.Navigation(\"Children\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Engine\", b =>\n {\n b.Navigation(\"Games\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Game\", b =>\n {\n b.Navigation(\"Actions\");\n\n b.Navigation(\"Archives\");\n\n b.Navigation(\"CustomFields\");\n\n b.Navigation(\"DependentGames\");\n\n b.Navigation(\"GameSaves\");\n\n b.Navigation(\"Issues\");\n\n b.Navigation(\"Keys\");\n\n b.Navigation(\"Media\");\n\n b.Navigation(\"MultiplayerModes\");\n\n 
b.Navigation(\"PlaySessions\");\n\n b.Navigation(\"SavePaths\");\n\n b.Navigation(\"Scripts\");\n\n b.Navigation(\"Servers\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Media\", b =>\n {\n b.Navigation(\"Parent\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Page\", b =>\n {\n b.Navigation(\"Children\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Redistributable\", b =>\n {\n b.Navigation(\"Archives\");\n\n b.Navigation(\"Scripts\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Server\", b =>\n {\n b.Navigation(\"Actions\");\n\n b.Navigation(\"HttpPaths\");\n\n b.Navigation(\"Scripts\");\n\n b.Navigation(\"ServerConsoles\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.StorageLocation\", b =>\n {\n b.Navigation(\"Archives\");\n\n b.Navigation(\"GameSaves\");\n\n b.Navigation(\"Media\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.User\", b =>\n {\n b.Navigation(\"CustomFields\");\n\n b.Navigation(\"GameSaves\");\n\n b.Navigation(\"Library\")\n .IsRequired();\n\n b.Navigation(\"Media\");\n\n b.Navigation(\"PlaySessions\");\n });\n#pragma warning restore 612, 618\n }", "docstring": "/// ", "url": "https://github.com/LANCommander/LANCommander/blob/51bc43ac93e3008391a2c78f958cbfc1d108bd80/LANCommander.Server.Data.SQLite/Migrations/20250131011110_AddGameCustomFields.Designer.cs#L18-L2792", "sha": "51bc43ac93e3008391a2c78f958cbfc1d108bd80"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AddRedistributables.Up", "code": "protected override void Up(MigrationBuilder migrationBuilder)\n {\n migrationBuilder.AlterColumn(\n name: \"GameId\",\n table: \"Scripts\",\n type: \"TEXT\",\n nullable: true,\n oldClrType: typeof(Guid),\n oldType: \"TEXT\");\n\n migrationBuilder.AddColumn(\n name: \"RedistributableId\",\n table: \"Scripts\",\n type: \"TEXT\",\n nullable: true);\n\n migrationBuilder.AlterColumn(\n name: \"GameId\",\n table: \"Archive\",\n type: \"TEXT\",\n nullable: true,\n oldClrType: typeof(Guid),\n oldType: \"TEXT\");\n\n migrationBuilder.AddColumn(\n name: \"RedistributableId\",\n table: \"Archive\",\n type: \"TEXT\",\n nullable: true);\n\n migrationBuilder.CreateTable(\n name: \"Redistributables\",\n columns: table => new\n {\n Id = table.Column(type: \"TEXT\", nullable: false),\n Name = table.Column(type: \"TEXT\", nullable: false),\n Description = table.Column(type: \"TEXT\", nullable: true),\n Notes = table.Column(type: \"TEXT\", nullable: true),\n CreatedOn = table.Column(type: \"TEXT\", nullable: false),\n CreatedById = table.Column(type: \"TEXT\", nullable: true),\n UpdatedOn = table.Column(type: \"TEXT\", nullable: false),\n UpdatedById = table.Column(type: \"TEXT\", nullable: true)\n },\n constraints: table =>\n {\n table.PrimaryKey(\"PK_Redistributables\", x => x.Id);\n table.ForeignKey(\n name: \"FK_Redistributables_AspNetUsers_CreatedById\",\n column: x => x.CreatedById,\n principalTable: \"AspNetUsers\",\n principalColumn: \"Id\");\n table.ForeignKey(\n name: \"FK_Redistributables_AspNetUsers_UpdatedById\",\n column: x => x.UpdatedById,\n principalTable: \"AspNetUsers\",\n principalColumn: \"Id\");\n });\n\n migrationBuilder.CreateIndex(\n name: \"IX_Scripts_RedistributableId\",\n table: \"Scripts\",\n column: \"RedistributableId\");\n\n migrationBuilder.CreateIndex(\n name: \"IX_Archive_RedistributableId\",\n table: \"Archive\",\n column: \"RedistributableId\");\n\n migrationBuilder.CreateIndex(\n name: 
\"IX_Redistributables_CreatedById\",\n table: \"Redistributables\",\n column: \"CreatedById\");\n\n migrationBuilder.CreateIndex(\n name: \"IX_Redistributables_UpdatedById\",\n table: \"Redistributables\",\n column: \"UpdatedById\");\n\n migrationBuilder.AddForeignKey(\n name: \"FK_Archive_Redistributables_RedistributableId\",\n table: \"Archive\",\n column: \"RedistributableId\",\n principalTable: \"Redistributables\",\n principalColumn: \"Id\",\n onDelete: ReferentialAction.Cascade);\n\n migrationBuilder.AddForeignKey(\n name: \"FK_Scripts_Redistributables_RedistributableId\",\n table: \"Scripts\",\n column: \"RedistributableId\",\n principalTable: \"Redistributables\",\n principalColumn: \"Id\",\n onDelete: ReferentialAction.Cascade);\n }", "docstring": "/// ", "url": "https://github.com/LANCommander/LANCommander/blob/51bc43ac93e3008391a2c78f958cbfc1d108bd80/LANCommander.Server.Data.SQLite/Migrations/20231018060152_AddRedistributables.cs#L12-L105", "sha": "51bc43ac93e3008391a2c78f958cbfc1d108bd80"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AddStorageLocations.BuildTargetModel", "code": "protected override void BuildTargetModel(ModelBuilder modelBuilder)\n {\n#pragma warning disable 612, 618\n modelBuilder\n .HasAnnotation(\"ProductVersion\", \"8.0.8\")\n .HasAnnotation(\"Proxies:ChangeTracking\", false)\n .HasAnnotation(\"Proxies:CheckEquality\", false)\n .HasAnnotation(\"Proxies:LazyLoading\", true);\n\n modelBuilder.Entity(\"CategoryGame\", b =>\n {\n b.Property(\"CategoriesId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GamesId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"CategoriesId\", \"GamesId\");\n\n b.HasIndex(\"GamesId\");\n\n b.ToTable(\"CategoryGame\");\n });\n\n modelBuilder.Entity(\"CollectionGame\", b =>\n {\n b.Property(\"CollectionId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"CollectionId\", \"GameId\");\n\n b.HasIndex(\"GameId\");\n\n b.ToTable(\"CollectionGame\");\n });\n\n modelBuilder.Entity(\"GameDeveloper\", b =>\n {\n b.Property(\"DeveloperId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"DeveloperId\", \"GameId\");\n\n b.HasIndex(\"GameId\");\n\n b.ToTable(\"GameDeveloper\");\n });\n\n modelBuilder.Entity(\"GameGenre\", b =>\n {\n b.Property(\"GamesId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GenresId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GamesId\", \"GenresId\");\n\n b.HasIndex(\"GenresId\");\n\n b.ToTable(\"GameGenre\");\n });\n\n modelBuilder.Entity(\"GamePlatform\", b =>\n {\n b.Property(\"GamesId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PlatformsId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GamesId\", \"PlatformsId\");\n\n b.HasIndex(\"PlatformsId\");\n\n b.ToTable(\"GamePlatform\");\n });\n\n modelBuilder.Entity(\"GamePublisher\", b =>\n {\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PublisherId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GameId\", \"PublisherId\");\n\n b.HasIndex(\"PublisherId\");\n\n b.ToTable(\"GamePublisher\");\n });\n\n modelBuilder.Entity(\"GameRedistributable\", b =>\n {\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RedistributableId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GameId\", \"RedistributableId\");\n\n b.HasIndex(\"RedistributableId\");\n\n b.ToTable(\"GameRedistributable\");\n });\n\n modelBuilder.Entity(\"GameTag\", b =>\n {\n b.Property(\"GamesId\")\n .HasColumnType(\"TEXT\");\n\n 
b.Property(\"TagsId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GamesId\", \"TagsId\");\n\n b.HasIndex(\"TagsId\");\n\n b.ToTable(\"GameTag\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Action\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Arguments\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PrimaryAction\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ServerId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"SortOrder\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"WorkingDirectory\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"ServerId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Actions\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Archive\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Changelog\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CompressedSize\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"LastVersionId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ObjectKey\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RedistributableId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"StorageLocationId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UncompressedSize\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Version\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"LastVersionId\");\n\n b.HasIndex(\"RedistributableId\");\n\n b.HasIndex(\"StorageLocationId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Archive\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Category\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ParentId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"ParentId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Categories\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Collection\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n 
.HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Collections\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Company\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Companies\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Engine\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Engines\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Game\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"BaseGameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"DirectoryName\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"EngineId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"IGDBId\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"KeyAllocationMethod\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Notes\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ReleasedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Singleplayer\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"SortTitle\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Title\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ValidKeyRegex\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"BaseGameId\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"EngineId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Games\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.GameSave\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"StorageLocationId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"StorageLocationId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"GameSaves\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Genre\", b =>\n {\n b.Property(\"Id\")\n 
.ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Genres\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Issue\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ResolvedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ResolvedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"ResolvedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Issues\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Key\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"AllocationMethod\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ClaimedByComputerName\")\n .HasMaxLength(255)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimedByIpv4Address\")\n .HasMaxLength(15)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimedByMacAddress\")\n .HasMaxLength(17)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimedByUserId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Value\")\n .IsRequired()\n .HasMaxLength(255)\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"ClaimedByUserId\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Keys\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Media\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Crc32\")\n .IsRequired()\n .HasMaxLength(8)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"FileId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"MimeType\")\n .HasMaxLength(255)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .HasMaxLength(64)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"SourceUrl\")\n .HasMaxLength(2048)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"StorageLocationId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ThumbnailId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n 
b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"StorageLocationId\");\n\n b.HasIndex(\"ThumbnailId\")\n .IsUnique();\n\n b.HasIndex(\"UpdatedById\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"Media\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.MultiplayerMode\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"MaxPlayers\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"MinPlayers\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"NetworkProtocol\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Spectators\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"MultiplayerModes\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Page\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Contents\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ParentId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Route\")\n .IsRequired()\n .HasMaxLength(2048)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Slug\")\n .IsRequired()\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"SortOrder\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Title\")\n .IsRequired()\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"ParentId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Pages\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Platform\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Platforms\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.PlaySession\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"End\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Start\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n 
b.HasIndex(\"UserId\");\n\n b.ToTable(\"PlaySessions\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Redistributable\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Notes\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Redistributables\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Role\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ConcurrencyStamp\")\n .IsConcurrencyToken()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"NormalizedName\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"NormalizedName\")\n .IsUnique()\n .HasDatabaseName(\"RoleNameIndex\");\n\n b.ToTable(\"AspNetRoles\", (string)null);\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.SavePath\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"IsRegex\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"WorkingDirectory\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"SavePaths\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Script\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Contents\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Description\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RedistributableId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RequiresAdmin\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ServerId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"RedistributableId\");\n\n b.HasIndex(\"ServerId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Scripts\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Server\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Arguments\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Autostart\")\n 
.HasColumnType(\"INTEGER\");\n\n b.Property(\"AutostartDelay\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"AutostartMethod\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Host\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"OnStartScriptPath\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"OnStopScriptPath\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Port\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ProcessTerminationMethod\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UseShellExecute\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"WorkingDirectory\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"GameId\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Servers\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.ServerConsole\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Host\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Password\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Port\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ServerId\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ServerId1\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"ServerId\");\n\n b.HasIndex(\"ServerId1\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"ServerConsoles\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.ServerHttpPath\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"LocalPath\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ServerId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ServerId1\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"ServerId\");\n\n b.HasIndex(\"ServerId1\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"ServerHttpPath\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.StorageLocation\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Default\")\n .HasColumnType(\"INTEGER\");\n\n 
b.Property(\"Path\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Type\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"StorageLocations\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Tag\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.ToTable(\"Tags\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.User\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"AccessFailedCount\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"Alias\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Approved\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ApprovedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ConcurrencyStamp\")\n .IsConcurrencyToken()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Email\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"EmailConfirmed\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"LockoutEnabled\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"LockoutEnd\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"NormalizedEmail\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"NormalizedUserName\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PasswordHash\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PhoneNumber\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PhoneNumberConfirmed\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"RefreshToken\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RefreshTokenExpiration\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"SecurityStamp\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"TwoFactorEnabled\")\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"UserName\")\n .HasMaxLength(256)\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"NormalizedEmail\")\n .HasDatabaseName(\"EmailIndex\");\n\n b.HasIndex(\"NormalizedUserName\")\n .IsUnique()\n .HasDatabaseName(\"UserNameIndex\");\n\n b.ToTable(\"AspNetUsers\", (string)null);\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.UserCustomField\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"CreatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasMaxLength(64)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedById\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UpdatedOn\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .IsRequired()\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Value\")\n .IsRequired()\n .HasMaxLength(1024)\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"CreatedById\");\n\n b.HasIndex(\"UpdatedById\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"UserCustomField\");\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityRoleClaim\", b =>\n {\n b.Property(\"Id\")\n 
.ValueGeneratedOnAdd()\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ClaimType\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimValue\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RoleId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"RoleId\");\n\n b.ToTable(\"AspNetRoleClaims\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserClaim\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"INTEGER\");\n\n b.Property(\"ClaimType\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ClaimValue\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"AspNetUserClaims\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserLogin\", b =>\n {\n b.Property(\"LoginProvider\")\n .HasMaxLength(128)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ProviderKey\")\n .HasMaxLength(128)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ProviderDisplayName\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"LoginProvider\", \"ProviderKey\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"AspNetUserLogins\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserRole\", b =>\n {\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RoleId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"UserId\", \"RoleId\");\n\n b.HasIndex(\"RoleId\");\n\n b.ToTable(\"AspNetUserRoles\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserToken\", b =>\n {\n b.Property(\"UserId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"LoginProvider\")\n .HasMaxLength(128)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Name\")\n .HasMaxLength(128)\n .HasColumnType(\"TEXT\");\n\n b.Property(\"Value\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"UserId\", \"LoginProvider\", \"Name\");\n\n b.ToTable(\"AspNetUserTokens\", (string)null);\n });\n\n modelBuilder.Entity(\"PageGame\", b =>\n {\n b.Property(\"GameId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"PageId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"GameId\", \"PageId\");\n\n b.HasIndex(\"PageId\");\n\n b.ToTable(\"PageGame\");\n });\n\n modelBuilder.Entity(\"PageRedistributable\", b =>\n {\n b.Property(\"PageId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RedistributableId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"PageId\", \"RedistributableId\");\n\n b.HasIndex(\"RedistributableId\");\n\n b.ToTable(\"PageRedistributable\");\n });\n\n modelBuilder.Entity(\"PageServer\", b =>\n {\n b.Property(\"PageId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"ServerId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"PageId\", \"ServerId\");\n\n b.HasIndex(\"ServerId\");\n\n b.ToTable(\"PageServer\");\n });\n\n modelBuilder.Entity(\"RoleCollection\", b =>\n {\n b.Property(\"CollectionId\")\n .HasColumnType(\"TEXT\");\n\n b.Property(\"RoleId\")\n .HasColumnType(\"TEXT\");\n\n b.HasKey(\"CollectionId\", \"RoleId\");\n\n b.HasIndex(\"RoleId\");\n\n b.ToTable(\"RoleCollection\");\n });\n\n modelBuilder.Entity(\"CategoryGame\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Category\", null)\n .WithMany()\n .HasForeignKey(\"CategoriesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GamesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n 
modelBuilder.Entity(\"CollectionGame\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Collection\", null)\n .WithMany()\n .HasForeignKey(\"CollectionId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GameDeveloper\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Company\", null)\n .WithMany()\n .HasForeignKey(\"DeveloperId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GameGenre\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GamesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Genre\", null)\n .WithMany()\n .HasForeignKey(\"GenresId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GamePlatform\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GamesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Platform\", null)\n .WithMany()\n .HasForeignKey(\"PlatformsId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GamePublisher\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Company\", null)\n .WithMany()\n .HasForeignKey(\"PublisherId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GameRedistributable\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Redistributable\", null)\n .WithMany()\n .HasForeignKey(\"RedistributableId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"GameTag\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GamesId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Tag\", null)\n .WithMany()\n .HasForeignKey(\"TagsId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Action\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Actions\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", \"Server\")\n .WithMany(\"Actions\")\n .HasForeignKey(\"ServerId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"Server\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Archive\", b =>\n 
{\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Archives\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Archive\", \"LastVersion\")\n .WithMany()\n .HasForeignKey(\"LastVersionId\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.Redistributable\", \"Redistributable\")\n .WithMany(\"Archives\")\n .HasForeignKey(\"RedistributableId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.StorageLocation\", \"StorageLocation\")\n .WithMany(\"Archives\")\n .HasForeignKey(\"StorageLocationId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"LastVersion\");\n\n b.Navigation(\"Redistributable\");\n\n b.Navigation(\"StorageLocation\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Category\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Category\", \"Parent\")\n .WithMany(\"Children\")\n .HasForeignKey(\"ParentId\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Parent\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Collection\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Company\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Engine\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Game\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"BaseGame\")\n .WithMany(\"DependentGames\")\n .HasForeignKey(\"BaseGameId\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n 
b.HasOne(\"LANCommander.Server.Data.Models.Engine\", \"Engine\")\n .WithMany(\"Games\")\n .HasForeignKey(\"EngineId\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"BaseGame\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Engine\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.GameSave\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"GameSaves\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.StorageLocation\", \"StorageLocation\")\n .WithMany(\"GameSaves\")\n .HasForeignKey(\"StorageLocationId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"User\")\n .WithMany(\"GameSaves\")\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"StorageLocation\");\n\n b.Navigation(\"UpdatedBy\");\n\n b.Navigation(\"User\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Genre\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Issue\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Issues\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.SetNull)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"ResolvedBy\")\n .WithMany()\n .HasForeignKey(\"ResolvedById\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"ResolvedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Key\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"ClaimedByUser\")\n .WithMany()\n .HasForeignKey(\"ClaimedByUserId\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Keys\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"ClaimedByUser\");\n\n 
b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Media\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Media\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.StorageLocation\", \"StorageLocation\")\n .WithMany(\"Media\")\n .HasForeignKey(\"StorageLocationId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Media\", \"Thumbnail\")\n .WithOne(\"Parent\")\n .HasForeignKey(\"LANCommander.Server.Data.Models.Media\", \"ThumbnailId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"User\")\n .WithMany(\"Media\")\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"StorageLocation\");\n\n b.Navigation(\"Thumbnail\");\n\n b.Navigation(\"UpdatedBy\");\n\n b.Navigation(\"User\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.MultiplayerMode\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"MultiplayerModes\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Page\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Page\", \"Parent\")\n .WithMany(\"Children\")\n .HasForeignKey(\"ParentId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Parent\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Platform\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.PlaySession\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"PlaySessions\")\n .HasForeignKey(\"GameId\")\n 
.OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"User\")\n .WithMany(\"PlaySessions\")\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n\n b.Navigation(\"User\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Redistributable\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.SavePath\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"SavePaths\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Script\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Scripts\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Redistributable\", \"Redistributable\")\n .WithMany(\"Scripts\")\n .HasForeignKey(\"RedistributableId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", \"Server\")\n .WithMany(\"Scripts\")\n .HasForeignKey(\"ServerId\")\n .OnDelete(DeleteBehavior.Cascade);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"Redistributable\");\n\n b.Navigation(\"Server\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Server\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", \"Game\")\n .WithMany(\"Servers\")\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Game\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.ServerConsole\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n 
b.HasOne(\"LANCommander.Server.Data.Models.Server\", \"Server\")\n .WithMany()\n .HasForeignKey(\"ServerId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", null)\n .WithMany(\"ServerConsoles\")\n .HasForeignKey(\"ServerId1\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Server\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.ServerHttpPath\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", \"Server\")\n .WithMany()\n .HasForeignKey(\"ServerId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", null)\n .WithMany(\"HttpPaths\")\n .HasForeignKey(\"ServerId1\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"Server\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.StorageLocation\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Tag\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\")\n .OnDelete(DeleteBehavior.SetNull);\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.UserCustomField\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"CreatedBy\")\n .WithMany()\n .HasForeignKey(\"CreatedById\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"UpdatedBy\")\n .WithMany()\n .HasForeignKey(\"UpdatedById\");\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", \"User\")\n .WithMany(\"CustomFields\")\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.Navigation(\"CreatedBy\");\n\n b.Navigation(\"UpdatedBy\");\n\n b.Navigation(\"User\");\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityRoleClaim\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Role\", null)\n .WithMany()\n .HasForeignKey(\"RoleId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserClaim\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserLogin\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n 
modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserRole\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Role\", null)\n .WithMany()\n .HasForeignKey(\"RoleId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.User\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserToken\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.User\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"PageGame\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Game\", null)\n .WithMany()\n .HasForeignKey(\"GameId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Page\", null)\n .WithMany()\n .HasForeignKey(\"PageId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"PageRedistributable\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Page\", null)\n .WithMany()\n .HasForeignKey(\"PageId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Redistributable\", null)\n .WithMany()\n .HasForeignKey(\"RedistributableId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"PageServer\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Page\", null)\n .WithMany()\n .HasForeignKey(\"PageId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Server\", null)\n .WithMany()\n .HasForeignKey(\"ServerId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"RoleCollection\", b =>\n {\n b.HasOne(\"LANCommander.Server.Data.Models.Collection\", null)\n .WithMany()\n .HasForeignKey(\"CollectionId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"LANCommander.Server.Data.Models.Role\", null)\n .WithMany()\n .HasForeignKey(\"RoleId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Category\", b =>\n {\n b.Navigation(\"Children\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Engine\", b =>\n {\n b.Navigation(\"Games\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Game\", b =>\n {\n b.Navigation(\"Actions\");\n\n b.Navigation(\"Archives\");\n\n b.Navigation(\"DependentGames\");\n\n b.Navigation(\"GameSaves\");\n\n b.Navigation(\"Issues\");\n\n b.Navigation(\"Keys\");\n\n b.Navigation(\"Media\");\n\n b.Navigation(\"MultiplayerModes\");\n\n b.Navigation(\"PlaySessions\");\n\n b.Navigation(\"SavePaths\");\n\n b.Navigation(\"Scripts\");\n\n b.Navigation(\"Servers\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Media\", b =>\n {\n b.Navigation(\"Parent\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Page\", b =>\n {\n b.Navigation(\"Children\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Redistributable\", b =>\n {\n b.Navigation(\"Archives\");\n\n b.Navigation(\"Scripts\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.Server\", b =>\n {\n b.Navigation(\"Actions\");\n\n b.Navigation(\"HttpPaths\");\n\n b.Navigation(\"Scripts\");\n\n b.Navigation(\"ServerConsoles\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.StorageLocation\", b =>\n {\n 
b.Navigation(\"Archives\");\n\n b.Navigation(\"GameSaves\");\n\n b.Navigation(\"Media\");\n });\n\n modelBuilder.Entity(\"LANCommander.Server.Data.Models.User\", b =>\n {\n b.Navigation(\"CustomFields\");\n\n b.Navigation(\"GameSaves\");\n\n b.Navigation(\"Media\");\n\n b.Navigation(\"PlaySessions\");\n });\n#pragma warning restore 612, 618\n }", "docstring": "/// ", "url": "https://github.com/LANCommander/LANCommander/blob/51bc43ac93e3008391a2c78f958cbfc1d108bd80/LANCommander.Server.Data.SQLite/Migrations/20241011014752_AddStorageLocations.Designer.cs#L18-L2573", "sha": "51bc43ac93e3008391a2c78f958cbfc1d108bd80"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AddCollections.Up", "code": "protected override void Up(MigrationBuilder migrationBuilder)\n {\n migrationBuilder.CreateTable(\n name: \"Collections\",\n columns: table => new\n {\n Id = table.Column(type: \"TEXT\", nullable: false),\n Name = table.Column(type: \"TEXT\", nullable: false),\n CreatedOn = table.Column(type: \"TEXT\", nullable: false),\n CreatedById = table.Column(type: \"TEXT\", nullable: true),\n UpdatedOn = table.Column(type: \"TEXT\", nullable: false),\n UpdatedById = table.Column(type: \"TEXT\", nullable: true)\n },\n constraints: table =>\n {\n table.PrimaryKey(\"PK_Collections\", x => x.Id);\n table.ForeignKey(\n name: \"FK_Collections_AspNetUsers_CreatedById\",\n column: x => x.CreatedById,\n principalTable: \"AspNetUsers\",\n principalColumn: \"Id\",\n onDelete: ReferentialAction.SetNull);\n table.ForeignKey(\n name: \"FK_Collections_AspNetUsers_UpdatedById\",\n column: x => x.UpdatedById,\n principalTable: \"AspNetUsers\",\n principalColumn: \"Id\",\n onDelete: ReferentialAction.SetNull);\n });\n\n migrationBuilder.CreateTable(\n name: \"CollectionGame\",\n columns: table => new\n {\n CollectionId = table.Column(type: \"TEXT\", nullable: false),\n GameId = table.Column(type: \"TEXT\", nullable: false)\n },\n constraints: table =>\n {\n table.PrimaryKey(\"PK_CollectionGame\", x => new { x.CollectionId, x.GameId });\n table.ForeignKey(\n name: \"FK_CollectionGame_Collections_CollectionId\",\n column: x => x.CollectionId,\n principalTable: \"Collections\",\n principalColumn: \"Id\",\n onDelete: ReferentialAction.Cascade);\n table.ForeignKey(\n name: \"FK_CollectionGame_Games_GameId\",\n column: x => x.GameId,\n principalTable: \"Games\",\n principalColumn: \"Id\",\n onDelete: ReferentialAction.Cascade);\n });\n\n migrationBuilder.CreateTable(\n name: \"RoleCollection\",\n columns: table => new\n {\n CollectionId = table.Column(type: \"TEXT\", nullable: false),\n RoleId = table.Column(type: \"TEXT\", nullable: false)\n },\n constraints: table =>\n {\n table.PrimaryKey(\"PK_RoleCollection\", x => new { x.CollectionId, x.RoleId });\n table.ForeignKey(\n name: \"FK_RoleCollection_AspNetRoles_RoleId\",\n column: x => x.RoleId,\n principalTable: \"AspNetRoles\",\n principalColumn: \"Id\",\n onDelete: ReferentialAction.Cascade);\n table.ForeignKey(\n name: \"FK_RoleCollection_Collections_CollectionId\",\n column: x => x.CollectionId,\n principalTable: \"Collections\",\n principalColumn: \"Id\",\n onDelete: ReferentialAction.Cascade);\n });\n\n migrationBuilder.CreateIndex(\n name: \"IX_CollectionGame_GameId\",\n table: \"CollectionGame\",\n column: \"GameId\");\n\n migrationBuilder.CreateIndex(\n name: \"IX_Collections_CreatedById\",\n table: \"Collections\",\n column: \"CreatedById\");\n\n migrationBuilder.CreateIndex(\n name: \"IX_Collections_UpdatedById\",\n table: 
\"Collections\",\n column: \"UpdatedById\");\n\n migrationBuilder.CreateIndex(\n name: \"IX_RoleCollection_RoleId\",\n table: \"RoleCollection\",\n column: \"RoleId\");\n }", "docstring": "/// ", "url": "https://github.com/LANCommander/LANCommander/blob/51bc43ac93e3008391a2c78f958cbfc1d108bd80/LANCommander.Server.Data.SQLite/Migrations/20231204044928_AddCollections.cs#L12-L109", "sha": "51bc43ac93e3008391a2c78f958cbfc1d108bd80"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CylinderSurface.TransformScale", "code": "private float TransformScale(float val)\n {\n return val * _cylinder.transform.lossyScale.x;\n }", "docstring": "/// ", "url": "https://github.com/Shopify/handy/blob/fa8686c291b1fe80e695bd8ad2474e2e852fd58b/Handy/Assets/Oculus/Interaction/Runtime/Scripts/Interaction/Surfaces/CylinderSurface.cs#L225-L228", "sha": "fa8686c291b1fe80e695bd8ad2474e2e852fd58b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "VoipOptions.SetBitrateForNewConnections", "code": "public void SetBitrateForNewConnections(VoipBitrate value) {\n CAPI.ovr_VoipOptions_SetBitrateForNewConnections(Handle, value);\n }", "docstring": "/// Sets the maximum average bitrate the audio codec should use. Higher", "url": "https://github.com/Shopify/handy/blob/fa8686c291b1fe80e695bd8ad2474e2e852fd58b/Handy/Assets/Oculus/Platform/Scripts/VoipOptions.cs#L22-L24", "sha": "fa8686c291b1fe80e695bd8ad2474e2e852fd58b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AdvancedAbuseReportOptions.SetObjectType", "code": "public void SetObjectType(string value) {\n CAPI.ovr_AdvancedAbuseReportOptions_SetObjectType(Handle, value);\n }", "docstring": "/// If report_type is content, a string representing the type of content being", "url": "https://github.com/Shopify/handy/blob/fa8686c291b1fe80e695bd8ad2474e2e852fd58b/Handy/Assets/Oculus/Platform/Scripts/AdvancedAbuseReportOptions.cs#L27-L29", "sha": "fa8686c291b1fe80e695bd8ad2474e2e852fd58b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OVRExtensions.ToTrackingSpacePose", "code": "public static OVRPose ToTrackingSpacePose(this Transform transform, Camera camera)\n\t{\n\t\t// Initializing to identity, but for all Oculus headsets, down below the pose will be initialized to the runtime's pose value, so identity will never be returned.\n\t\tOVRPose headPose = OVRPose.identity;\n\n\t\tVector3 pos;\n\t\tQuaternion rot;\n\t\tif (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))\n\t\t\theadPose.position = pos;\n\t\tif (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head, NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))\n\t\t\theadPose.orientation = rot;\n\n\t\tvar ret = headPose * transform.ToHeadSpacePose(camera);\n\n\t\treturn ret;\n\t}", "docstring": "/// ", "url": "https://github.com/Shopify/handy/blob/fa8686c291b1fe80e695bd8ad2474e2e852fd58b/Handy/Assets/Oculus/VR/Scripts/OVRCommon.cs#L52-L67", "sha": "fa8686c291b1fe80e695bd8ad2474e2e852fd58b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WitEditorUI.LayoutHeaderButton", "code": "public static void LayoutHeaderButton(Texture2D headerTexture, string headerURL)\n {\n if (headerTexture != null)\n {\n GUILayout.BeginHorizontal();\n GUILayout.FlexibleSpace();\n float maxWidth = EditorGUIUtility.currentViewWidth - 
WitStyles.WindowPaddingLeft - WitStyles.WindowPaddingRight - WitStyles.IconButton.CalcSize(WitStyles.HelpIcon).x;\n                float headerWidth = Mathf.Min(WitStyles.HeaderWidth, maxWidth);\n                float headerHeight = headerWidth * (float)headerTexture.height / (float)headerTexture.width;\n                if (GUILayout.Button(headerTexture, WitStyles.HeaderButton, GUILayout.Width(headerWidth), GUILayout.Height(headerHeight)) && !string.IsNullOrEmpty(headerURL))\n                {\n                    Application.OpenURL(headerURL);\n                }\n                GUILayout.FlexibleSpace();\n                if (LayoutIconButton(WitStyles.HelpIcon))\n                {\n                    Application.OpenURL(headerURL);\n                }\n                GUILayout.EndHorizontal();\n            }\n        }", "docstring": "// Layout header button", "url": "https://github.com/Shopify/handy/blob/fa8686c291b1fe80e695bd8ad2474e2e852fd58b/Handy/Assets/Oculus/Voice/Lib/Wit.ai/Scripts/Editor/WitEditorUI.cs#L115-L135", "sha": "fa8686c291b1fe80e695bd8ad2474e2e852fd58b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AssetDatabaseUtility.FindUnityAsset", "code": "public static T FindUnityAsset<T>(string filter) where T : UnityEngine.Object\n        {\n            T[] results = FindUnityAssets<T>(filter, true);\n            if (results != null && results.Length > 0)\n            {\n                return results[0];\n            }\n            return null;\n        }", "docstring": "// Find Unity asset", "url": "https://github.com/Shopify/handy/blob/fa8686c291b1fe80e695bd8ad2474e2e852fd58b/Handy/Assets/Oculus/Voice/Lib/Wit.ai/Scripts/Editor/Utility/AssetDatabaseUtility.cs#L17-L25", "sha": "fa8686c291b1fe80e695bd8ad2474e2e852fd58b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DisablePopUpEffects.TargetMethods", "code": "private static IEnumerable<MethodBase> TargetMethods() =>\n        typeof(PopFXManager).GetMethods()\n            .Where(it => it.Name.StartsWith(\"SpawnFX\"));", "docstring": "// ReSharper disable once UnusedMember.Local", "url": "https://github.com/onimp/oni_multiplayer/blob/926abd3b5f177b12ab4cc6d6aa537dcdb09684b7/src/MultiplayerMod/Game/Effects/DisablePopUpEffects.cs#L15-L17", "sha": "926abd3b5f177b12ab4cc6d6aa537dcdb09684b7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DNSHelperTests.ATest", "code": "[TestMethod]\n    public void ATest()\n    {\n        string? ip = DNSHelpers.AQuery(\"hypixel.net\");\n        Console.WriteLine(ip);\n        Assert.IsNotNull(string.IsNullOrEmpty(ip));\n    }", "docstring": "/// ", "url": "https://github.com/zkhssb/NectarRCON/blob/e7349a9bd8533522b7c105fe3e0ab79325ad8e05/NectarRCON.Tests/DNSHelperTests.cs#L10-L16", "sha": "e7349a9bd8533522b7c105fe3e0ab79325ad8e05"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GithubUpdater.GetLatestVersion", "code": "private AppVersion? GetLatestVersion(bool enablePre)\n    {\n        if (_version is null)\n            return null;\n        using(HttpRequestMessage request = new(HttpMethod.Get, \"releases/latest\"))\n        {\n            request.Headers.Add(\"User-Agent\", $\"{_version.AppName}-AppUpdater\");\n            using(HttpResponseMessage response = _client.Send(request))\n            {\n                if (!response.IsSuccessStatusCode)\n                    throw new HttpRequestException(response.StatusCode.ToString());\n                string resultString = string.Empty;\n                Task.Run(async () =>\n                {\n                    resultString = await response.Content.ReadAsStringAsync();\n                }).Wait();\n                Release release = JsonSerializer.Deserialize<Release>(resultString) ?? 
throw new JsonException();\n foreach(var asset in release.Assets)\n {\n string fileName = Path.GetFileNameWithoutExtension(asset.Name);\n try\n {\n fileName = \"NectarRcon-x86-1.0.0-beta2\";\n AppVersion version = AppVersion.ParseVersion(fileName);\n if(version.AppName.ToLower() == _version.AppName.ToLower() && version.Platform.ToLower() == _version.Platform.ToLower())\n {\n if (version.IsPreRelease && !enablePre)\n continue;\n if (version > _version)\n {\n return version;\n }\n }\n }\n catch (InvalidOperationException) { } // Invalid version format\n }\n return null;\n }\n }\n }", "docstring": "/// ", "url": "https://github.com/zkhssb/NectarRCON/blob/e7349a9bd8533522b7c105fe3e0ab79325ad8e05/NectarRCON.Updater/GithubUpdater.cs#L24-L63", "sha": "e7349a9bd8533522b7c105fe3e0ab79325ad8e05"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UniTaskExtensions.AsUniTask", "code": "public static UniTask AsUniTask(this Task task, bool useCurrentSynchronizationContext = true)\n {\n var promise = new UniTaskCompletionSource();\n\n task.ContinueWith((x, state) =>\n {\n var p = (UniTaskCompletionSource)state;\n\n switch (x.Status)\n {\n case TaskStatus.Canceled:\n p.TrySetCanceled();\n break;\n case TaskStatus.Faulted:\n p.TrySetException(x.Exception);\n break;\n case TaskStatus.RanToCompletion:\n p.TrySetResult(x.Result);\n break;\n default:\n throw new NotSupportedException();\n }\n }, promise, useCurrentSynchronizationContext ? TaskScheduler.FromCurrentSynchronizationContext() : TaskScheduler.Current);\n\n return promise.Task;\n }", "docstring": "/// ", "url": "https://github.com/MMMaellon/SmartObjectSync/blob/f600f5a5df125ac4e01d35c43ed60d0d2b3c0594/Packages/com.vrchat.base/Runtime/VRCSDK/Plugins/UniTask/Runtime/UniTaskExtensions.cs#L17-L42", "sha": "f600f5a5df125ac4e01d35c43ed60d0d2b3c0594"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UnityBindingExtensions.BindTo", "code": "public static void BindTo(this IUniTaskAsyncEnumerable source, TObject monoBehaviour, Action bindAction, bool rebindOnError = true)\n where TObject : MonoBehaviour\n {\n BindToCore(source, monoBehaviour, bindAction, monoBehaviour.GetCancellationTokenOnDestroy(), rebindOnError).Forget();\n }", "docstring": "// -> Action", "url": "https://github.com/MMMaellon/SmartObjectSync/blob/f600f5a5df125ac4e01d35c43ed60d0d2b3c0594/Packages/com.vrchat.base/Runtime/VRCSDK/Plugins/UniTask/Runtime/UnityBindingExtensions.cs#L190-L194", "sha": "f600f5a5df125ac4e01d35c43ed60d0d2b3c0594"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AsyncUniTaskVoid.GetStatus", "code": "UniTaskStatus IUniTaskSource.GetStatus(short token)\n {\n return UniTaskStatus.Pending;\n }", "docstring": "// dummy interface implementation for TaskTracker.", "url": "https://github.com/MMMaellon/SmartObjectSync/blob/f600f5a5df125ac4e01d35c43ed60d0d2b3c0594/Packages/com.vrchat.base/Runtime/VRCSDK/Plugins/UniTask/Runtime/CompilerServices/StateMachineRunner.cs#L109-L112", "sha": "f600f5a5df125ac4e01d35c43ed60d0d2b3c0594"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WeakGenericCollectionFormatter.GetUninitializedObject", "code": "protected override object GetUninitializedObject()\n {\n return Activator.CreateInstance(this.SerializedType);\n }", "docstring": "/// ", "url": 
"https://github.com/MMMaellon/SmartObjectSync/blob/f600f5a5df125ac4e01d35c43ed60d0d2b3c0594/Packages/com.vrchat.worlds/Runtime/Udon/Serialization/OdinSerializer/Core/Formatters/GenericCollectionFormatter.cs#L229-L232", "sha": "f600f5a5df125ac4e01d35c43ed60d0d2b3c0594"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MethodInfoFormatter.DeserializeImplementation", "code": "protected override void DeserializeImplementation(ref T value, IDataReader reader)\n {\n #if false //vrc security patch \n string name;\n EntryType entry;\n\n entry = reader.PeekEntry(out name);\n\n if (entry == EntryType.StartOfArray)\n {\n // We have legacy ISerializable data for the MethodInfo, since in no case will data written by this formatter ever start with an array.\n // In this case, get the proper legacy formatter for this type and read the data using that.\n\n IFormatter serializableFormatter;\n serializableFormatter = new WeakSerializableFormatter(typeof(T));\n value = (T)(object)serializableFormatter.Deserialize(reader);\n return;\n }\n\n Type declaringType = null;\n string methodName = null;\n Type[] signature = null;\n Type[] genericArguments = null;\n\n while ((entry = reader.PeekEntry(out name)) != EntryType.EndOfNode && entry != EntryType.EndOfArray && entry != EntryType.EndOfStream)\n {\n switch (name)\n {\n case \"declaringType\":\n {\n var t = TypeSerializer.ReadValue(reader);\n\n if (t != null)\n {\n declaringType = t;\n }\n }\n break;\n\n case \"methodName\":\n {\n methodName = StringSerializer.ReadValue(reader);\n }\n break;\n\n case \"signature\":\n {\n signature = TypeArraySerializer.ReadValue(reader);\n }\n break;\n\n case \"genericArguments\":\n {\n genericArguments = TypeArraySerializer.ReadValue(reader);\n }\n break;\n\n default:\n reader.SkipEntry();\n break;\n }\n }\n\n if (declaringType == null)\n {\n reader.Context.Config.DebugContext.LogWarning(\"Missing declaring type of MethodInfo on deserialize.\");\n return;\n }\n\n if (methodName == null)\n {\n reader.Context.Config.DebugContext.LogError(\"Missing method name of MethodInfo on deserialize.\");\n return;\n }\n\n MethodInfo methodInfo;\n bool useSignature = false;\n bool wasAmbiguous = false;\n\n if (signature != null)\n {\n useSignature = true;\n\n for (int i = 0; i < signature.Length; i++)\n {\n if (signature[i] == null)\n {\n useSignature = false;\n break;\n }\n }\n }\n\n if (useSignature)\n {\n try\n {\n methodInfo = declaringType.GetMethod(methodName, Flags.AllMembers, null, signature, null);\n }\n catch (AmbiguousMatchException)\n {\n methodInfo = null;\n wasAmbiguous = true;\n }\n }\n else\n {\n try\n {\n methodInfo = declaringType.GetMethod(methodName, Flags.AllMembers);\n }\n catch (AmbiguousMatchException)\n {\n methodInfo = null;\n wasAmbiguous = true;\n }\n }\n\n if (methodInfo == null)\n {\n if (useSignature)\n {\n reader.Context.Config.DebugContext.LogWarning(\"Could not find method with signature \" + name + \"(\" + string.Join(\", \", signature.Select(p => p.GetNiceFullName()).ToArray()) + \") on type '\" + declaringType.FullName + (wasAmbiguous ? \"; resolution was ambiguous between multiple methods\" : string.Empty) + \".\");\n }\n else\n {\n reader.Context.Config.DebugContext.LogWarning(\"Could not find method with name \" + name + \" on type '\" + declaringType.GetNiceFullName() + (wasAmbiguous ? 
\"; resolution was ambiguous between multiple methods\" : string.Empty) + \".\");\n }\n\n return;\n }\n\n if (methodInfo.IsGenericMethodDefinition)\n {\n if (genericArguments == null)\n {\n reader.Context.Config.DebugContext.LogWarning(\"Method '\" + declaringType.GetNiceFullName() + \".\" + methodInfo.GetNiceName() + \"' to deserialize is a generic method definition, but no generic arguments were in the serialization data.\");\n return;\n }\n\n int argCount = methodInfo.GetGenericArguments().Length;\n\n if (genericArguments.Length != argCount)\n {\n reader.Context.Config.DebugContext.LogWarning(\"Method '\" + declaringType.GetNiceFullName() + \".\" + methodInfo.GetNiceName() + \"' to deserialize is a generic method definition, but there is the wrong number of generic arguments in the serialization data.\");\n return;\n }\n\n for (int i = 0; i < genericArguments.Length; i++)\n {\n if (genericArguments[i] == null)\n {\n reader.Context.Config.DebugContext.LogWarning(\"Method '\" + declaringType.GetNiceFullName() + \".\" + methodInfo.GetNiceName() + \"' to deserialize is a generic method definition, but one of the serialized generic argument types failed to bind on deserialization.\");\n return;\n }\n }\n\n try\n {\n methodInfo = methodInfo.MakeGenericMethod(genericArguments);\n }\n catch (Exception ex)\n {\n reader.Context.Config.DebugContext.LogWarning(\"Method '\" + declaringType.GetNiceFullName() + \".\" + methodInfo.GetNiceName() + \"' to deserialize is a generic method definition, but failed to create generic method from definition, using generic arguments '\" + string.Join(\", \", genericArguments.Select(p => p.GetNiceFullName()).ToArray()) + \"'. Method creation failed with an exception of type \" + ex.GetType().GetNiceFullName() + \", with the message: \" + ex.Message);\n return;\n }\n }\n\n try\n {\n value = (T)methodInfo;\n }\n catch (InvalidCastException)\n {\n reader.Context.Config.DebugContext.LogWarning(\"The serialized method '\" + declaringType.GetNiceFullName() + \".\" + methodInfo.GetNiceName() + \"' was successfully resolved into a MethodInfo reference of the runtime type '\" + methodInfo.GetType().GetNiceFullName() + \"', but failed to be cast to expected type '\" + typeof(T).GetNiceFullName() + \"'.\");\n return;\n }\n\n this.RegisterReferenceID(value, reader);\n #endif\n reader.Context.Config.DebugContext.LogWarning(\"MethodInfo deserialization has been removed for security.\"); //VRC\n }", "docstring": "/// ", "url": "https://github.com/MMMaellon/SmartObjectSync/blob/f600f5a5df125ac4e01d35c43ed60d0d2b3c0594/Packages/com.vrchat.worlds/Runtime/Udon/Serialization/OdinSerializer/Core/Formatters/MethodInfoFormatter.cs#L46-L227", "sha": "f600f5a5df125ac4e01d35c43ed60d0d2b3c0594"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PrefabDeserializeUtility.GetSceneObjectsToKeepSet", "code": "public static HashSet GetSceneObjectsToKeepSet(UnityEngine.Object unityObject, bool createIfDoesntExist)\n {\n HashSet keep;\n\n if (!SceneObjectsToKeepOnApply.TryGetValue(unityObject, out keep))\n {\n keep = new HashSet(ReferenceEqualityComparer.Default);\n SceneObjectsToKeepOnApply.Add(unityObject, keep);\n }\n\n return keep;\n }", "docstring": "/// ", "url": "https://github.com/MMMaellon/SmartObjectSync/blob/f600f5a5df125ac4e01d35c43ed60d0d2b3c0594/Packages/com.vrchat.worlds/Runtime/Udon/Serialization/OdinSerializer/Unity Integration/UnitySerializationUtility.cs#L137-L148", "sha": "f600f5a5df125ac4e01d35c43ed60d0d2b3c0594"} +{"repo_name": "", "dataset": 
"github_2023", "owner": "", "lang": "", "func_name": "CustomConfigInfo.isDefaultConfig", "code": "public bool isDefaultConfig()\n {\n return fileName == content && content == \"Default\";\n }", "docstring": "// is default config?", "url": "https://github.com/MortezaBashsiz/CFScanner/blob/bcb96dd437e9cf90350ee5581df613b92380827f/windows/Classes/Config/CustomConfigs.cs#L115-L118", "sha": "bcb96dd437e9cf90350ee5581df613b92380827f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ScanResults.load", "code": "public bool load()\n {\n try\n {\n if (!File.Exists(resultsFileName))\n {\n return false;\n }\n\n string jsonString = File.ReadAllText(resultsFileName);\n loadedInstance = JsonSerializer.Deserialize(jsonString)!;\n\n }\n catch (Exception ex)\n {\n Tools.logStep($\"ScanResults.load() had exception: {ex.Message}\");\n return false;\n }\n\n return true;\n }", "docstring": "// load app config", "url": "https://github.com/MortezaBashsiz/CFScanner/blob/bcb96dd437e9cf90350ee5581df613b92380827f/windows/Classes/Config/ScanResults.cs#L45-L65", "sha": "bcb96dd437e9cf90350ee5581df613b92380827f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FtpFixture.InitializeAsync", "code": "public async Task InitializeAsync() {\n\t\t\tawait FtpContainer.StartAsync().ConfigureAwait(false);\n\t\t}", "docstring": "///", "url": "https://github.com/robinrodricks/FluentStorage/blob/7bac0df391a5d8df1abe19d965e2e9b476aa37e5/FluentStorage.Tests.FTP/FtpFixture.cs#L55-L57", "sha": "7bac0df391a5d8df1abe19d965e2e9b476aa37e5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GenericValidation.CheckBlobPrefix", "code": "public static void CheckBlobPrefix(string prefix) {\n\t\t\tif (prefix == null) return;\n\n\t\t\tstring[] parts = prefix.Split('/');\n\n\t\t\tforeach (string part in parts) {\n\t\t\t\tif (part.Length > MaxBlobPrefixLength)\n\t\t\t\t\tthrow new ArgumentException(\n\t\t\t\t\t string.Format(\"blob prefix cannot exceed {0} characters\", MaxBlobPrefixLength),\n\t\t\t\t\t nameof(prefix));\n\t\t\t}\n\t\t}", "docstring": "/// ", "url": "https://github.com/robinrodricks/FluentStorage/blob/7bac0df391a5d8df1abe19d965e2e9b476aa37e5/FluentStorage/GenericValidation.cs#L18-L29", "sha": "7bac0df391a5d8df1abe19d965e2e9b476aa37e5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ThrottlingTrollExtensions.UseThrottlingTroll", "code": "public static IApplicationBuilder UseThrottlingTroll(this IApplicationBuilder builder, Action options = null)\n {\n // Need to create this instance here, so that Assemblies are correctly initialized.\n var opt = new ThrottlingTrollOptions\n {\n Assemblies = new List { Assembly.GetCallingAssembly() }\n };\n\n if (options != null)\n {\n options(opt);\n }\n\n opt.GetConfigFunc = ThrottlingTrollCoreExtensions.MergeAllConfigSources(opt.Config, CollectDeclarativeConfig(opt.Assemblies), opt.GetConfigFunc, builder.ApplicationServices);\n\n if (opt.Log == null)\n {\n var logger = builder.ApplicationServices.GetService>();\n opt.Log = logger == null ? null : (l, s) => logger.Log(l, s);\n }\n\n // TODO: move default counter store creation into ThrottlingTroll\n opt.CounterStore ??= builder.ApplicationServices.GetService() ?? 
new MemoryCacheCounterStore();\n\n            return builder.UseMiddleware<ThrottlingTrollMiddleware>(opt);\n        }", "docstring": "/// ", "url": "https://github.com/ThrottlingTroll/ThrottlingTroll/blob/ae518fcb7b4a24633a806f389599e68457ff05fe/ThrottlingTroll.AspNet/ThrottlingTrollExtensions.cs#L24-L49", "sha": "ae518fcb7b4a24633a806f389599e68457ff05fe"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ThrottlingTrollHandlerExtensions.AddThrottlingTrollMessageHandler", "code": "public static IHttpClientBuilder AddThrottlingTrollMessageHandler(this IHttpClientBuilder builder, Action<ThrottlingTrollOptions> options = null)\n        {\n            return builder.AddThrottlingTrollMessageHandler(options == null ? null : (provider, opt) => options(opt));\n        }", "docstring": "/// ", "url": "https://github.com/ThrottlingTroll/ThrottlingTroll/blob/ae518fcb7b4a24633a806f389599e68457ff05fe/ThrottlingTroll.Core/ThrottlingTrollHandler.cs#L533-L536", "sha": "ae518fcb7b4a24633a806f389599e68457ff05fe"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HapticRack.Awake", "code": "void Awake()\n\t\t{\n\t\t\tif ( linearMapping == null )\n\t\t\t{\n\t\t\t\tlinearMapping = GetComponent<LinearMapping>();\n\t\t\t}\n\t\t}", "docstring": "//-------------------------------------------------", "url": "https://github.com/whateverusername0/VRTRAKILL/blob/74dd472bf13baf273a8e2793712174050ff415be/VRTRAKILL.Plugin/New Unity Project/Assets/SteamVR/InteractionSystem/Core/Scripts/HapticRack.cs#L36-L42", "sha": "74dd472bf13baf273a8e2793712174050ff415be"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LinearAnimator.Awake", "code": "void Awake()\n\t\t{\n\t\t\tif ( animator == null )\n\t\t\t{\n\t\t\t\tanimator = GetComponent<Animator>();\n\t\t\t}\n\n\t\t\tanimator.speed = 0.0f;\n\n\t\t\tif ( linearMapping == null )\n\t\t\t{\n\t\t\t\tlinearMapping = GetComponent<LinearMapping>();\n\t\t\t}\n\t\t}", "docstring": "//-------------------------------------------------", "url": "https://github.com/whateverusername0/VRTRAKILL/blob/74dd472bf13baf273a8e2793712174050ff415be/VRTRAKILL.Plugin/New Unity Project/Assets/SteamVR/InteractionSystem/Core/Scripts/LinearAnimator.cs#L23-L36", "sha": "74dd472bf13baf273a8e2793712174050ff415be"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SoundPlayOneshot.Awake", "code": "void Awake()\n\t\t{\n\t\t\tthisAudioSource = GetComponent<AudioSource>();\n\n\t\t\tif ( playOnAwake )\n\t\t\t{\n\t\t\t\tPlay();\n\t\t\t}\n\t\t}", "docstring": "//-------------------------------------------------", "url": "https://github.com/whateverusername0/VRTRAKILL/blob/74dd472bf13baf273a8e2793712174050ff415be/VRTRAKILL.Plugin/New Unity Project/Assets/SteamVR/InteractionSystem/Core/Scripts/SoundPlayOneshot.cs#L28-L36", "sha": "74dd472bf13baf273a8e2793712174050ff415be"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ArrowHand.Awake", "code": "void Awake()\n\t\t{\n\t\t\tallowTeleport = GetComponent<AllowTeleportWhileAttachedToHand>();\n\t\t\t//allowTeleport.teleportAllowed = true;\n\t\t\tallowTeleport.overrideHoverLock = false;\n\n\t\t\tarrowList = new List<GameObject>();\n\t\t}", "docstring": "//-------------------------------------------------", "url": "https://github.com/whateverusername0/VRTRAKILL/blob/74dd472bf13baf273a8e2793712174050ff415be/VRTRAKILL.Plugin/New Unity Project/Assets/SteamVR/InteractionSystem/Longbow/Scripts/ArrowHand.cs#L46-L53", "sha": "74dd472bf13baf273a8e2793712174050ff415be"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PlatformerMovementP.Update", "code": 
"[HarmonyPrefix] [HarmonyPatch(nameof(PlatformerMovement.Update))] static bool Update(PlatformerMovement __instance)\n {\n if (MonoSingleton.Instance.paused) return false;\n\n Vector2 vector = Vector2.zero;\n if (__instance.activated)\n {\n vector = Input.InputVars.MoveVector * Vars.Config.MovementMultiplier;\n __instance.movementDirection = Vector3.ClampMagnitude(vector.x * Vector3.right + vector.y * Vector3.forward, 1f);\n __instance.movementDirection = Quaternion.Euler(0f, __instance.platformerCamera.rotation.eulerAngles.y, 0f) * __instance.movementDirection;\n }\n else\n {\n __instance.rb.velocity = new Vector3(0f, __instance.rb.velocity.y, 0f);\n __instance.movementDirection = Vector3.zero;\n }\n\n if (__instance.movementDirection.magnitude > 0f) __instance.anim.SetBool(\"Running\", true);\n else __instance.anim.SetBool(\"Running\", false);\n\n if (__instance.rb.velocity.y < -100f)\n __instance.rb.velocity = new Vector3(__instance.rb.velocity.x, -100f, __instance.rb.velocity.z);\n\n if (__instance.activated && MonoSingleton.Instance.InputSource.Jump.WasPerformedThisFrame\n && !__instance.falling && !__instance.jumpCooldown) __instance.Jump(false, 1f);\n\n if (!__instance.groundCheck.onGround)\n {\n if (__instance.fallTime < 1f)\n {\n __instance.fallTime += Time.deltaTime * 5f;\n if (__instance.fallTime > 1f) __instance.falling = true;\n }\n else if (__instance.rb.velocity.y < -2f) __instance.fallSpeed = __instance.rb.velocity.y;\n }\n else __instance.fallTime = 0f;\n\n if (__instance.groundCheck.onGround && __instance.falling && !__instance.jumpCooldown)\n {\n __instance.falling = false;\n __instance.fallSpeed = 0f;\n __instance.groundCheck.heavyFall = false;\n }\n\n if (MonoSingleton.Instance.InputSource.Slide.WasPerformedThisFrame && __instance.groundCheck.onGround\n && __instance.activated && !__instance.sliding) __instance.StartSlide();\n\n RaycastHit raycastHit;\n if (MonoSingleton.Instance.InputSource.Slide.WasPerformedThisFrame\n && !__instance.groundCheck.onGround && !__instance.sliding && !__instance.jumping && __instance.activated\n && Physics.Raycast(__instance.groundCheck.transform.position + __instance.transform.up, __instance.transform.up * -1f, out raycastHit, 2f,\n LayerMaskDefaults.Get(LMD.Environment))) __instance.StartSlide();\n\n if (MonoSingleton.Instance.InputSource.Slide.WasCanceledThisFrame && __instance.sliding)\n __instance.StopSlide();\n\n if (__instance.sliding && __instance.activated)\n {\n __instance.slideLength += Time.deltaTime;\n if (__instance.currentSlideEffect != null) __instance.currentSlideEffect.transform.position = __instance.transform.position + __instance.dodgeDirection * 10f;\n if (__instance.slideSafety > 0f) __instance.slideSafety -= Time.deltaTime * 5f;\n if (__instance.groundCheck.onGround) __instance.currentSlideScrape.transform.position = __instance.transform.position + __instance.dodgeDirection;\n else __instance.currentSlideScrape.transform.position = Vector3.one * 5000f;\n }\n\n // Dash fix\n if (MonoSingleton.Instance.InputSource.Dodge.WasPerformedThisFrame && __instance.activated)\n {\n if (__instance.groundProperties && !__instance.groundProperties.canDash)\n if (!__instance.groundProperties.silentDashFail) Object.Instantiate(__instance.staminaFailSound);\n /* else */ if (__instance.boostCharge >= 100f)\n {\n if (__instance.sliding) __instance.StopSlide();\n __instance.boostLeft = 100f;\n __instance.boost = true;\n __instance.anim.Play(\"Dash\", -1, 0f);\n\n __instance.dodgeDirection = __instance.movementDirection.normalized * 
Vars.Config.MovementMultiplier;\n if (__instance.dodgeDirection == Vector3.zero)\n __instance.dodgeDirection = __instance.playerModel.forward * Vars.Config.MovementMultiplier;\n\n Quaternion identity = Quaternion.identity;\n identity.SetLookRotation(__instance.dodgeDirection * -1f);\n Object.Instantiate\n (__instance.dodgeParticle, __instance.transform.position + Vector3.up * 2f + __instance.dodgeDirection * 10f, identity).transform.localScale *= 2f;\n if (!MonoSingleton.Instance.majorEnabled || !MonoSingleton.Instance.infiniteStamina)\n __instance.boostCharge -= 100f;\n\n __instance.aud.clip = __instance.dodgeSound;\n __instance.aud.volume = 1f;\n __instance.aud.pitch = 1f;\n __instance.aud.Play();\n }\n else Object.Instantiate(__instance.staminaFailSound);\n }\n\n if (__instance.boostCharge != 300f && !__instance.sliding && !__instance.spinning)\n {\n float num = 1f;\n if (__instance.difficulty == 1) num = 1.5f;\n else if (__instance.difficulty == 0) num = 2f;\n\n __instance.boostCharge = Mathf.MoveTowards(__instance.boostCharge, 300f, 70f * Time.deltaTime * num);\n }\n\n if (__instance.spinCooldown > 0f) __instance.spinCooldown = Mathf.MoveTowards(__instance.spinCooldown, 0f, Time.deltaTime);\n\n if (__instance.activated && !__instance.spinning && __instance.spinCooldown <= 0f && !MonoSingleton.Instance.PerformingCheatMenuCombo()\n && (MonoSingleton.Instance.InputSource.Fire1.WasPerformedThisFrame\n || MonoSingleton.Instance.InputSource.Fire2.WasPerformedThisFrame\n || MonoSingleton.Instance.InputSource.Punch.WasPerformedThisFrame)\n && !MonoSingleton.Instance.paused) __instance.Spin();\n\n if (__instance.spinning) __instance.playerModel.Rotate(Vector3.up, Time.deltaTime * 3600f, Space.Self);\n else if (__instance.movementDirection.magnitude != 0f || __instance.boost)\n {\n Quaternion quaternion = Quaternion.LookRotation(__instance.movementDirection);\n if (__instance.boost)\n quaternion = Quaternion.LookRotation(__instance.dodgeDirection);\n __instance.playerModel.rotation =\n Quaternion.RotateTowards(__instance.playerModel.rotation, quaternion,\n (Quaternion.Angle(__instance.playerModel.rotation, quaternion) + 20f) * 35f * __instance.movementDirection.magnitude * Time.deltaTime);\n }\n\n if (__instance.cameraTrack)\n {\n if (!__instance.freeCamera)\n {\n __instance.CheckCameraTarget(false);\n __instance.platformerCamera.transform.position =\n Vector3.MoveTowards(__instance.platformerCamera.position, __instance.transform.position + __instance.cameraTarget,\n Time.deltaTime * 15f * (0.1f + Vector3.Distance(__instance.platformerCamera.position, __instance.cameraTarget)));\n\n __instance.platformerCamera.transform.rotation =\n Quaternion.RotateTowards(__instance.platformerCamera.transform.rotation,\n Quaternion.Euler(__instance.cameraRotation), Time.deltaTime * 15f * (0.1f + Vector3.Distance(__instance.platformerCamera.rotation.eulerAngles,\n __instance.cameraRotation)));\n }\n else if (!MonoSingleton.Instance.paused)\n {\n __instance.platformerCamera.transform.position = __instance.transform.position + __instance.defaultCameraTarget;\n __instance.platformerCamera.transform.rotation = Quaternion.Euler(__instance.defaultCameraRotation);\n\n Vector2 vector2 = MonoSingleton.Instance.InputSource.Look.ReadValue();\n if (!MonoSingleton.Instance.reverseY) __instance.rotationX += vector2.y * (MonoSingleton.Instance.mouseSensitivity / 10f);\n else __instance.rotationX -= vector2.y * (MonoSingleton.Instance.mouseSensitivity / 10f);\n\n if (!MonoSingleton.Instance.reverseX) __instance.rotationY += 
vector2.x * (MonoSingleton.Instance.mouseSensitivity / 10f);\n else __instance.rotationY -= vector2.x * (MonoSingleton.Instance.mouseSensitivity / 10f);\n\n if (__instance.rotationY > 180f) __instance.rotationY -= 360f;\n else if (__instance.rotationY < -180f) __instance.rotationY += 360f;\n\n __instance.rotationX = Mathf.Clamp(__instance.rotationX, -69f, 109f);\n\n float num2 = 2.5f;\n if (__instance.sliding || Physics.Raycast(__instance.transform.position + Vector3.up * 0.625f, Vector3.up, 2.5f, LayerMaskDefaults.Get(LMD.Environment)))\n num2 = 0.625f;\n\n Vector3 vector3 = __instance.transform.position + Vector3.up * num2;\n __instance.platformerCamera.RotateAround(vector3, Vector3.left, __instance.rotationX);\n __instance.platformerCamera.RotateAround(vector3, Vector3.up, __instance.rotationY);\n\n RaycastHit raycastHit2;\n if (Physics.SphereCast(vector3, 0.25f, __instance.platformerCamera.position - vector3, out raycastHit2,\n Vector3.Distance(vector3, __instance.platformerCamera.position), LayerMaskDefaults.Get(LMD.Environment)))\n __instance.platformerCamera.position = raycastHit2.point + 0.5f * raycastHit2.normal;\n }\n }\n\n RaycastHit raycastHit3;\n if (Physics.SphereCast(__instance.transform.position + Vector3.up, 0.5f, Vector3.down, out raycastHit3, float.PositiveInfinity,\n LayerMaskDefaults.Get(LMD.Environment), QueryTriggerInteraction.Ignore))\n {\n __instance.jumpShadow.position = raycastHit3.point + Vector3.up * 0.05f;\n __instance.jumpShadow.forward = raycastHit3.normal;\n }\n else\n {\n __instance.jumpShadow.position = __instance.transform.position - Vector3.up * 1000f;\n __instance.jumpShadow.forward = Vector3.up;\n }\n\n if (__instance.coinTimer > 0f) __instance.coinTimer = Mathf.MoveTowards(__instance.coinTimer, 0f, Time.deltaTime);\n if (__instance.coinEffectTimer > 0f) __instance.coinEffectTimer = Mathf.MoveTowards(__instance.coinEffectTimer, 0f, Time.deltaTime);\n else if (__instance.queuedCoins > 0) __instance.CoinGetEffect();\n\n if (__instance.invincible && __instance.extraHits < 3)\n {\n if (__instance.blinkTimer > 0f)\n __instance.blinkTimer = Mathf.MoveTowards(__instance.blinkTimer, 0f, Time.deltaTime);\n else\n {\n __instance.blinkTimer = 0.05f;\n if (__instance.playerModel.gameObject.activeSelf) __instance.playerModel.gameObject.SetActive(false);\n else __instance.playerModel.gameObject.SetActive(true);\n }\n }\n\n if (__instance.superTimer > 0f)\n {\n if (!NoWeaponCooldown.NoCooldown) __instance.superTimer = Mathf.MoveTowards(__instance.superTimer, 0f, Time.deltaTime);\n if (__instance.superTimer == 0f) __instance.GetHit();\n }\n\n return false;\n }", "docstring": "// change movement vector to vr one", "url": "https://github.com/whateverusername0/VRTRAKILL/blob/74dd472bf13baf273a8e2793712174050ff415be/VRTRAKILL.Plugin/Plugin/ULTRAKILL/Movement/Patches/PlatformerMovementP.cs#L12-L218", "sha": "74dd472bf13baf273a8e2793712174050ff415be"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HUDOptionsP.FlashImageTweak", "code": "[HarmonyPostfix] [HarmonyPatch(typeof(FlashImage), nameof(FlashImage.Flash))] static void FlashImageTweak(FlashImage __instance)\n {\n if (__instance.gameObject.name.Contains(\"White\") || __instance.gameObject.name.Contains(\"Black\"))\n __instance.transform.localScale *= 20;\n }", "docstring": "/// ", "url": "https://github.com/whateverusername0/VRTRAKILL/blob/74dd472bf13baf273a8e2793712174050ff415be/VRTRAKILL.Plugin/Plugin/ULTRAKILL/UI/Patches/HUDOptionsP.cs#L70-L74", "sha": 
"74dd472bf13baf273a8e2793712174050ff415be"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExperienceView.CreateRoundRectRgn", "code": "[System.Runtime.InteropServices.DllImport(\"gdi32.dll\")]\n private static extern IntPtr CreateRoundRectRgn(int nLeftRect, int nTopRect, int nRightRect, int nBottomRect, int nWidthEllipse, int nHeightEllipse);", "docstring": "// Import rounded corners", "url": "https://github.com/builtbybel/Bloatynosy/blob/48d27896be3557e1878094b86362514b8ab6cbca/src/Bloatynosy/Views/ExperienceView.cs#L137-L138", "sha": "48d27896be3557e1878094b86362514b8ab6cbca"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CalendarView.OnHeaderButtonClick", "code": "private void OnHeaderButtonClick(object? sender, RoutedEventArgs e)\n {\n // Header button should be hidden in Month mode.\n if (Mode == CalendarViewMode.Month) return;\n if (Mode == CalendarViewMode.Year)\n {\n Mode = CalendarViewMode.Decade;\n var range = DateTimeHelper.GetDecadeViewRangeByYear(ContextDate.Year!.Value);\n _dateContextSyncing = true;\n ContextDate = ContextDate.With(startYear: range.start, endYear: range.end);\n _dateContextSyncing = false;\n UpdateYearButtons();\n return;\n }\n\n if (Mode == CalendarViewMode.Decade)\n {\n Mode = CalendarViewMode.Century;\n var range = DateTimeHelper.GetCenturyViewRangeByYear(ContextDate.StartYear!.Value);\n _dateContextSyncing = true;\n ContextDate = ContextDate.With(startYear: range.start, endYear: range.end);\n _dateContextSyncing = false;\n UpdateYearButtons();\n }\n }", "docstring": "/// ", "url": "https://github.com/irihitech/Ursa.Avalonia/blob/ada538555472f8d623efda22a48c3d2ee7ed7e37/src/Ursa/Controls/DateTimePicker/CalendarView.cs#L246-L270", "sha": "ada538555472f8d623efda22a48c3d2ee7ed7e37"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EffectManager.ApplyAllFigmaEffectsToUnityNode", "code": "public static void ApplyAllFigmaEffectsToUnityNode(GameObject nodeGameObject,Node node, \n FigmaImportProcessData figmaImportProcessData)\n {\n foreach (var effect in node.effects) ApplyEffectToUnityNode(nodeGameObject,node,effect,figmaImportProcessData);\n \n }", "docstring": "/// ", "url": "https://github.com/simonoliver/UnityFigmaBridge/blob/dbd12bff29e8bc84e2682bb19495899711651455/UnityFigmaBridge/Editor/Nodes/EffectManager.cs#L20-L25", "sha": "dbd12bff29e8bc84e2682bb19495899711651455"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OffsetCalibrationTool.UnparentTarget", "code": "public void UnparentTarget(SteamVR_Action_Boolean fromAction, SteamVR_Input_Sources fromSource)\n {\n parent = target.parent;\n Mod.logger.LogInfo($\"Unparenting {target.name}, remembering {parent.name}\");\n target.SetParent(null, true);\n }", "docstring": "// Unparent Target, so we can finetune the position", "url": "https://github.com/Okabintaro/SubmersedVR/blob/726867165cc602ba929477d1f02991fd1b1ded9a/SubmersedVR/VR/OffsetCalibrationTool.cs#L58-L63", "sha": "726867165cc602ba929477d1f02991fd1b1ded9a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CardProperty.ConvertMsResourceToIcon", "code": "public static unsafe BitmapImage ConvertMsResourceToIcon(Uri iconPathUri, string packageFullName)\n {\n try\n {\n var indirectPathToResource = \"@{\" + packageFullName + \"? 
\" + iconPathUri.AbsoluteUri + \"}\";\n Span outputBuffer = new char[MaxBufferLength];\n\n fixed (char* outBufferPointer = outputBuffer)\n {\n fixed (char* resourcePathPointer = indirectPathToResource)\n {\n var res = PInvoke.SHLoadIndirectString(resourcePathPointer, new PWSTR(outBufferPointer), (uint)outputBuffer.Length, null);\n if (res.Succeeded)\n {\n var iconImageLocation = new string(outputBuffer.TrimEnd('\\0'));\n\n if (File.Exists(iconImageLocation))\n {\n var bitmap = new BitmapImage();\n bitmap.UriSource = new Uri(iconImageLocation);\n return bitmap;\n }\n }\n\n Log.Error($\"Failed to find icon image in path: {iconPathUri} for package: {packageFullName} due to error: 0x{res.Value:X}\");\n }\n }\n }\n catch (Exception ex)\n {\n Log.Error(ex, $\"Failed to load icon from ms-resource: {iconPathUri} for package: {packageFullName} due to error:\");\n }\n\n return new BitmapImage();\n }", "docstring": "/// ", "url": "https://github.com/microsoft/devhome/blob/d52734ce0e33a82af3313d24c3c2979c37b68bab/common/Environments/Models/CardProperty.cs#L115-L149", "sha": "d52734ce0e33a82af3313d24c3c2979c37b68bab"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExperimentalFeaturesViewModel.IsFeaturePresent", "code": "private bool IsFeaturePresent(ExperimentalFeature experimentalFeature)\n {\n if (string.Equals(experimentalFeature.Id, \"FileExplorerSourceControlIntegration\", StringComparison.OrdinalIgnoreCase))\n {\n try\n {\n return ExtraFolderPropertiesWrapper.IsSupported();\n }\n catch (Exception)\n {\n return false;\n }\n }\n\n throw new NotImplementedException();\n }", "docstring": "/// ", "url": "https://github.com/microsoft/devhome/blob/d52734ce0e33a82af3313d24c3c2979c37b68bab/settings/DevHome.Settings/ViewModels/ExperimentalFeaturesViewModel.cs#L39-L54", "sha": "d52734ce0e33a82af3313d24c3c2979c37b68bab"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TelemetryFactory.Get", "code": "public static T Get()\n where T : ITelemetry\n {\n return (T)(object)GetTelemetryInstance();\n }", "docstring": "/// ", "url": "https://github.com/microsoft/devhome/blob/d52734ce0e33a82af3313d24c3c2979c37b68bab/telemetry/DevHome.Telemetry/TelemetryFactory.cs#L32-L36", "sha": "d52734ce0e33a82af3313d24c3c2979c37b68bab"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TestSystems.ShutDownAsync", "code": "public IAsyncOperation ShutDownAsync(string options) => throw new NotImplementedException();", "docstring": "// Unimplemented APIs", "url": "https://github.com/microsoft/devhome/blob/d52734ce0e33a82af3313d24c3c2979c37b68bab/tools/Environments/DevHome.Environments/TestModels/TestSystems.cs#L123-L123", "sha": "d52734ce0e33a82af3313d24c3c2979c37b68bab"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WindowsDriverExtensions.Wait", "code": "public static DefaultWait> Wait(\n this WindowsDriver driver,\n TimeSpan timeout,\n TimeSpan pollingInterval)\n {\n return new(driver)\n {\n Timeout = timeout,\n PollingInterval = pollingInterval,\n };\n }", "docstring": "/// ", "url": "https://github.com/microsoft/devhome/blob/d52734ce0e33a82af3313d24c3c2979c37b68bab/uitest/Extensions/WindowsDriverExtensions.cs#L22-L32", "sha": "d52734ce0e33a82af3313d24c3c2979c37b68bab"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Program.Main", "code": "public static async Task Main(string[] args)\n {\n // We declare for variables which we will use later, ggmlType, 
modelFileName, wavFileName and coreMlModelcName\n var ggmlType = GgmlType.Base;\n var modelFileName = \"ggml-base.bin\";\n var wavFileName = \"kennedy.wav\";\n var coreMlModelcName = \"ggml-base-encoder.mlmodelc\";\n\n // This section detects whether the \"ggml-base.bin\" file exists in our project disk. If it doesn't, it downloads it from the internet\n if (!File.Exists(modelFileName))\n {\n await DownloadModel(modelFileName, ggmlType);\n }\n\n // This sections detects whether the modelc directory (used by CoreML) is in out project disk. If it doesn't, it downloads it and extract it to the current folder.\n if (!Directory.Exists(coreMlModelcName))\n {\n // Note: The modelc directory needs to be extracted at the same level as the \"ggml-base.bin\" file (and the current executable).\n await WhisperGgmlDownloader.GetEncoderCoreMLModelAsync(ggmlType)\n .ExtractToPath(\".\");\n }\n\n // Optional logging from the native library\n \n using var whisperLogger = LogProvider.AddLogger((level, message) =>\n {\n Console.Write($\"{level}: {message}\");\n });\n\n // This section creates the whisperFactory object which is used to create the processor object.\n using var whisperFactory = WhisperFactory.FromPath(modelFileName);\n\n // This section creates the processor object which is used to process the audio file, it uses language `auto` to detect the language of the audio file.\n using var processor = whisperFactory.CreateBuilder()\n .WithLanguage(\"auto\")\n .Build();\n\n using var fileStream = File.OpenRead(wavFileName);\n\n // This section processes the audio file and prints the results (start time, end time and text) to the console.\n await foreach (var result in processor.ProcessAsync(fileStream))\n {\n Console.WriteLine($\"{result.Start}->{result.End}: {result.Text}\");\n }\n }", "docstring": "// This examples shows how to use Whisper.net to create a transcription from an audio file with 16Khz sample rate, using CoreML", "url": "https://github.com/sandrohanea/whisper.net/blob/eae8f62164799288329b48c3cafd9644c295ed24/examples/CoreML/Program.cs#L13-L57", "sha": "eae8f62164799288329b48c3cafd9644c295ed24"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Program.Main", "code": "public static async Task Main(string[] args)\n {\n // We declare three variables which we will use later, ggmlType, modelFileName and wavFileName\n var ggmlType = GgmlType.LargeV3Turbo;\n var modelFileName = \"ggml-largev3.bin\";\n var wavFileName = \"kennedy.wav\";\n\n using var whisperLogger = LogProvider.AddConsoleLogging(WhisperLogLevel.Debug);\n\n // This section detects whether the \"ggml-largev3.bin\" file exists in our project disk. 
If it doesn't, it downloads it from the internet\n if (!File.Exists(modelFileName))\n {\n await DownloadModel(modelFileName, ggmlType);\n }\n\n // This section creates the whisperFactory object which is used to create the processor object.\n using var whisperFactory = WhisperFactory.FromPath(modelFileName);\n\n // This section creates the processor object which is used to process the audio file, it uses language `auto` to detect the language of the audio file.\n using var processor = whisperFactory.CreateBuilder()\n .WithLanguage(\"auto\")\n .Build();\n\n using var fileStream = File.OpenRead(wavFileName);\n\n // This section processes the audio file and prints the results (start time, end time and text) to the console.\n await foreach (var result in processor.ProcessAsync(fileStream))\n {\n Console.WriteLine($\"{result.Start}->{result.End}: {result.Text}\");\n }\n }", "docstring": "// This example shows how to use Whisper.net to create a transcription from an audio file with 16Khz sample rate.", "url": "https://github.com/sandrohanea/whisper.net/blob/eae8f62164799288329b48c3cafd9644c295ed24/examples/Vulkan/Program.cs#L14-L44", "sha": "eae8f62164799288329b48c3cafd9644c295ed24"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ServerExtension.MapPath", "code": "public static string MapPath(this string path)\n {\n return MapPath(path, false);\n }", "docstring": "/// ", "url": "https://github.com/zmrid/iMES-Factory/blob/d301552bd0e953882ea0b2162c0e17b36dd3a2a9/iMES.Net/iMES.Core/Extensions/ServerExtension.cs#L16-L19", "sha": "d301552bd0e953882ea0b2162c0e17b36dd3a2a9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MailHelper.Send", "code": "public static void Send(string title, string content, params string[] list)\n {\n Console.WriteLine(AppSetting.GetSection(\"ModifyMember\")[\"DateUTCField\"]);\n MailMessage message = new MailMessage\n {\n From = new MailAddress(address, name)// Sender email address\n };\n foreach (var item in list)\n {\n message.To.Add(item);// Recipient address\n }\n\n message.Subject = title;// Email subject\n\n message.Body = content;// Email body\n // Configure the SMTP server address\n SmtpClient client = new SmtpClient\n {\n Host = host,\n Port = port,// Port 587\n EnableSsl = enableSsl,\n // Sender email and authorization password\n Credentials = new NetworkCredential(address, authPwd)\n };\n client.Send(message);\n }", "docstring": "/// ", "url": "https://github.com/zmrid/iMES-Factory/blob/d301552bd0e953882ea0b2162c0e17b36dd3a2a9/iMES.Net/iMES.Core/Utilities/MailHelper.cs#L37-L62", "sha": "d301552bd0e953882ea0b2162c0e17b36dd3a2a9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WorkFlowManager.AddProcese", "code": "public static void AddProcese<T>(T entity, bool rewrite = false,bool changeTableStatus=true) where T:class\n {\n string workTable = typeof(T).GetEntityTableName();\n\n Sys_WorkFlow workFlow = DBServerProvider.DbContext.Set<Sys_WorkFlow>()\n .Where(x => x.WorkTable == workTable)\n .Include(x => x.Sys_WorkFlowStep)\n .FirstOrDefault();\n\n if (workFlow == null || workFlow.Sys_WorkFlowStep.Count == 0) return;\n // Regenerate the workflow\n if (rewrite)\n {\n Rewrite(entity, workFlow,changeTableStatus);\n return;\n }\n\n var userInfo = UserContext.Current.UserInfo;\n Guid workFlowTable_Id = Guid.NewGuid();\n Sys_WorkFlowTable workFlowTable = new Sys_WorkFlowTable()\n {\n WorkFlowTable_Id = workFlowTable_Id,\n AuditStatus = (int)AuditStatus.审核中,\n CurrentOrderId = 1,\n Enable = 1,\n WorkFlow_Id = workFlow.WorkFlow_Id,\n WorkName = workFlow.WorkName,\n 
WorkTable = workTable,\n WorkTableKey = typeof(T).GetKeyProperty().GetValue(entity).ToString(),\n WorkTableName = workFlow.WorkTableName,\n CreateID = userInfo.User_Id,\n CreateDate = DateTime.Now,\n Creator = userInfo.UserTrueName,\n Sys_WorkFlowTableStep = workFlow.Sys_WorkFlowStep.OrderBy(x => x.OrderId).Select(s => new Sys_WorkFlowTableStep()\n {\n Sys_WorkFlowTableStep_Id = Guid.NewGuid(),\n WorkFlowTable_Id = workFlowTable_Id,\n WorkFlow_Id = s.WorkFlow_Id,\n StepId = s.StepId,\n StepName = s.StepName,\n AuditId = s.StepType == (int)AuditType.用户审批 ? s.StepValue : null,\n StepType = s.StepType,\n StepValue = s.StepValue,\n OrderId = s.OrderId,\n Enable = 1,\n CreateDate = DateTime.Now,\n }).ToList()\n };\n DBServerProvider.DbContext.Set<Sys_WorkFlowTable>().Add(workFlowTable);\n DBServerProvider.DbContext.SaveChanges();\n }", "docstring": "/// ", "url": "https://github.com/zmrid/iMES-Factory/blob/d301552bd0e953882ea0b2162c0e17b36dd3a2a9/iMES.Net/iMES.Core/WorkFlow/WorkFlowManager.cs#L106-L156", "sha": "d301552bd0e953882ea0b2162c0e17b36dd3a2a9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FilterCriteria.AppendCriteria", "code": "public void AppendCriteria([NotNull]FilterCriteria criteria)\n {\n if (criteria.FilteringMethod != this.FilteringMethod)\n throw new Exception(\"The filtering method used is incompatible and may cause problems due to differences in syntax.\");\n\n FilterClauses.AddRange(criteria.FilterClauses);\n\n foreach (var param in criteria.FilterParameters)\n {\n FilterParameters.Add(param.Key, param.Value);\n }\n }", "docstring": "/// ", "url": "https://github.com/shesha-io/shesha-framework/blob/bb69efce02d772f166ce621f6993a795f94c35df/shesha-core/src/Shesha.Framework/Domain/FilterCriteria.cs#L37-L48", "sha": "bb69efce02d772f166ce621f6993a795f94c35df"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SwaggerOperationFilter.GetSingleAttributeOfMemberOrDeclaringTypeOrDefault", "code": "public static TAttribute GetSingleAttributeOfMemberOrDeclaringTypeOrDefault<TAttribute>(MemberInfo memberInfo, TAttribute defaultValue = default, bool inherit = true)\n where TAttribute : class\n {\n return memberInfo.GetCustomAttributes(true).OfType<TAttribute>().FirstOrDefault()\n ?? memberInfo.ReflectedType?.GetTypeInfo().GetCustomAttributes(true).OfType<TAttribute>().FirstOrDefault()\n ?? 
defaultValue;\n }", "docstring": "/// ", "url": "https://github.com/shesha-io/shesha-framework/blob/bb69efce02d772f166ce621f6993a795f94c35df/shesha-core/src/Shesha.Framework/Swagger/SwaggerOperationFilter.cs#L144-L150", "sha": "bb69efce02d772f166ce621f6993a795f94c35df"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CodeNamingHelper.ValidateCodeIdentifier", "code": "public static void ValidateCodeIdentifier(string identifier)\n {\n if (!CSharpSyntaxHelper.IsValidIdentifier(identifier))\n throw new IdentifierIsNotValidException(identifier);\n }", "docstring": "/// ", "url": "https://github.com/shesha-io/shesha-framework/blob/bb69efce02d772f166ce621f6993a795f94c35df/shesha-core/src/Shesha.Framework/Utilities/CodeNamingHelper.cs#L25-L29", "sha": "bb69efce02d772f166ce621f6993a795f94c35df"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ModuleLocator.GetModuleName", "code": "public string GetModuleName(Type migrationType)\n {\n return migrationType.GetConfigurableModuleName();\n }", "docstring": "/// inheritedDoc", "url": "https://github.com/shesha-io/shesha-framework/blob/bb69efce02d772f166ce621f6993a795f94c35df/shesha-core/src/Shesha.NHibernate/NHibernate/ModuleLocator.cs#L14-L17", "sha": "bb69efce02d772f166ce621f6993a795f94c35df"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "M20230516104216.Up", "code": "public override void Up()\n {\n\n Create.Table(\"SheshaFunctionalTests_Books\")\n .WithIdAsGuid()\n .WithFullAuditColumns()\n .WithColumn(\"Frwk_Discriminator\").AsString(SheshaDatabaseConsts.DiscriminatorMaxSize).NotNullable()\n .WithColumn(\"Name\").AsString().Nullable()\n .WithColumn(\"Description\").AsString().Nullable()\n .WithColumn(\"Price\").AsDecimal();\n\n\n Create.Table(\"SheshaFunctionalTests_Schools\")\n .WithIdAsGuid()\n .WithFullAuditColumns()\n .WithColumn(\"Frwk_Discriminator\").AsString(SheshaDatabaseConsts.DiscriminatorMaxSize).NotNullable()\n .WithColumn(\"Name\").AsString().Nullable()\n .WithColumn(\"Latitude\").AsDecimal().Nullable()\n .WithColumn(\"Longitude\").AsDecimal()\n .WithColumn(\"ContactNumber\").AsString().Nullable();\n\n\n Create.Table(\"SheshaFunctionalTests_Subjects\")\n .WithIdAsGuid()\n .WithFullAuditColumns()\n .WithColumn(\"Frwk_Discriminator\").AsString(SheshaDatabaseConsts.DiscriminatorMaxSize).NotNullable()\n .WithColumn(\"Name\").AsString().Nullable()\n .WithColumn(\"Description\").AsString().Nullable()\n .WithColumn(\"Total\").AsDecimal().Nullable();\n\n\n Alter.Table(\"SheshaFunctionalTests_Schools\").AddForeignKeyColumn(\"AddressId\", \"Core_Addresses\").Nullable();\n\n Alter.Table(\"SheshaFunctionalTests_Schools\").AddForeignKeyColumn(\"HeadLeaderId\", \"Core_Persons\").Nullable();\n\n Alter.Table(\"SheshaFunctionalTests_Subjects\").AddForeignKeyColumn(\"BookId\", \"SheshaFunctionalTests_Books\").Nullable();\n\n Alter.Table(\"SheshaFunctionalTests_Subjects\").AddForeignKeyColumn(\"SchoolId\", \"SheshaFunctionalTests_Schools\").Nullable();\n\n }", "docstring": "/// ", "url": "https://github.com/shesha-io/shesha-framework/blob/bb69efce02d772f166ce621f6993a795f94c35df/shesha-functional-tests/backend/src/Module/Boxfusion.SheshaFunctionalTests.Common.Domain/Migrations/M20230516104216.cs#L14-L53", "sha": "bb69efce02d772f166ce621f6993a795f94c35df"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "M20240219144300.Up", "code": "public override void Up()\n {\n 
Alter.Table(\"SheshaFunctionalTests_MembershipPayments\").AddColumn(\"PaymentTypeLkp\").AsInt64().Nullable();\n }", "docstring": "/// ", "url": "https://github.com/shesha-io/shesha-framework/blob/bb69efce02d772f166ce621f6993a795f94c35df/shesha-functional-tests/backend/src/Module/Boxfusion.SheshaFunctionalTests.Common.Domain/Migrations/M20240219144300.cs#L14-L17", "sha": "bb69efce02d772f166ce621f6993a795f94c35df"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AppConfig.SwappedBrightness", "code": "public static bool SwappedBrightness()\n {\n return ContainsModel(\"FA506IEB\") || ContainsModel(\"FA506IH\") || ContainsModel(\"FA506IC\") || ContainsModel(\"FX506LU\") || ContainsModel(\"FX506IC\") || ContainsModel(\"FX506LH\") || ContainsModel(\"FA506IV\") || ContainsModel(\"FA706IC\") || ContainsModel(\"FA706IH\");\n }", "docstring": "// Devices with bugged bios command to change brightness", "url": "https://github.com/seerge/g-helper/blob/b5428ff2e8485e95a9ac29541dcb961337bd663c/app/AppConfig.cs#L408-L411", "sha": "b5428ff2e8485e95a9ac29541dcb961337bd663c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GDTaskPlayerLoopAutoload.LocalAddContinuation", "code": "public void LocalAddContinuation(PlayerLoopTiming timing, Action continuation)\n {\n var q = yielders[(int)timing];\n if (q == null)\n {\n ThrowInvalidLoopTiming(timing);\n }\n q.Enqueue(continuation);\n }", "docstring": "// NOTE: Continuation means a asynchronous task invoked by another task after the other task finishes.", "url": "https://github.com/Fractural/GDTask/blob/661244bba9839c8633d2106457ef5be2faa228df/addons/GDTask/Autoload/GDTaskPlayerLoopAutoload.cs#L48-L56", "sha": "661244bba9839c8633d2106457ef5be2faa228df"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EventFlags.MatchesWeekOfMonth", "code": "public static bool MatchesWeekOfMonth(this DateTime date, int recurrence)\n => Matches(recurrence, 1 << (date.Day / 8));", "docstring": "// or has been set + flag matches", "url": "https://github.com/nichealpham/minishop-composer/blob/68d2f4e875a2bd9d7c3c2c9226795d25c239ecd0/api/AppGlobal/Commons/Helpers.cs", "sha": "68d2f4e875a2bd9d7c3c2c9226795d25c239ecd0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NetworkTool.IpToHostAsync", "code": "public static async Task<(string Host, string BaseHost)> IpToHostAsync(string ip)\n {\n string result = string.Empty;\n string baseHost = string.Empty;\n if (!OperatingSystem.IsWindows()) return (result, baseHost);\n if (!await IsInternetAliveByNicAsync()) return (result, baseHost); // nslookup takes time when there is no internet access\n\n string content = await ProcessManager.ExecuteAsync(\"nslookup\", null, ip, true, true);\n if (string.IsNullOrEmpty(content)) return (result, baseHost);\n content = content.ToLower();\n string[] split = content.Split(Environment.NewLine);\n for (int n = 0; n < split.Length; n++)\n {\n string line = split[n];\n if (line.Contains(\"name:\"))\n {\n result = line.Replace(\"name:\", string.Empty).Trim();\n if (result.Contains('.'))\n {\n GetHostDetails(result, 0, out _, out _, out baseHost, out _, out _, out _);\n }\n break;\n }\n }\n\n return (result, baseHost);\n }", "docstring": "/// ", "url": "https://github.com/msasanmh/DNSveil/blob/3f107516dddb1f4eba472665b925994f06a29162/MsmhToolsClass/MsmhToolsClass/NetworkTool.cs#L79-L105", "sha": "3f107516dddb1f4eba472665b925994f06a29162"} +{"repo_name": "", "dataset": 
"github_2023", "owner": "", "lang": "", "func_name": "WebAPI.Github_Latest_Release_Async", "code": "public static async Task> Github_Latest_Release_Async(string owner, string repo, int timeoutMs = 5000)\n {\n List relaeseURLs = new();\n\n try\n {\n Uri apiMain = new($\"https://api.github.com/repos/{owner}/{repo}/releases/latest\");\n HttpRequest hr = new()\n {\n Method = HttpMethod.Get,\n AllowInsecure = true,\n TimeoutMS = timeoutMs,\n URI = apiMain,\n Headers =\n {\n { \"accept\", \"application/json\" }\n }\n };\n HttpRequestResponse hrr = await HttpRequest.SendAsync(hr).ConfigureAwait(false);\n string json = Encoding.UTF8.GetString(hrr.Data);\n\n List path = new()\n {\n new JsonTool.JsonPath() {Key = \"assets\", Count = 1, Conditions = new()},\n new JsonTool.JsonPath() {Key = \"browser_download_url\", Conditions = new()}\n };\n\n relaeseURLs = JsonTool.GetValues(json, path);\n }\n catch (Exception ex)\n {\n Debug.WriteLine(\"WebAPI Github_Latest_Release_Async: \" + ex.Message);\n }\n\n return relaeseURLs;\n }", "docstring": "/// ", "url": "https://github.com/msasanmh/DNSveil/blob/3f107516dddb1f4eba472665b925994f06a29162/MsmhToolsClass/MsmhToolsClass/WebAPI.cs#L37-L72", "sha": "3f107516dddb1f4eba472665b925994f06a29162"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ToonMeshUtility.IntersectRayMesh", "code": "public static bool IntersectRayMesh(Ray ray, Mesh mesh, Matrix4x4 matrix, out RaycastHit hit)\n {\n object[] parameters = { ray, mesh, matrix, null };\n bool result = (bool) MethodIntersectRayMesh.Invoke(null, parameters);\n hit = (RaycastHit) parameters[3];\n return result;\n }", "docstring": "// Adapted from https://gist.github.com/MattRix/9205bc62d558fef98045", "url": "https://github.com/Delt06/toon-rp/blob/687b7b26747a8bd6b0905299e5248bbca0285a77/Packages/com.deltation.toon-rp/Editor/VertexColorPaint/ToonMeshUtility.cs#L82-L88", "sha": "687b7b26747a8bd6b0905299e5248bbca0285a77"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Application.Main", "code": "static void Main(string[] args)\n {\n // if you want to use a different Application Delegate class from \"AppDelegate\"\n // you can specify it here.\n UIApplication.Main(args, null, typeof(AppDelegate));\n }", "docstring": "// This is the main entry point of the application.", "url": "https://github.com/AvaloniaUI/Avalonia.Labs/blob/81ee313436d2305f7c169e0a099fe84d33d50624/samples/Avalonia.Labs.Catalog.iOS/Main.cs#L8-L13", "sha": "81ee313436d2305f7c169e0a099fe84d33d50624"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ContentDialog.ShowAsync", "code": "public async Task ShowAsync() => await ShowAsyncCore(null);", "docstring": "/// ", "url": "https://github.com/AvaloniaUI/Avalonia.Labs/blob/81ee313436d2305f7c169e0a099fe84d33d50624/src/Avalonia.Labs.Controls/ContentDialog/ContentDialog.cs#L113-L113", "sha": "81ee313436d2305f7c169e0a099fe84d33d50624"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ReedSolomonEncoder.Encode", "code": "internal static byte[] Encode(byte[] dataBytes, int numECBytes, GeneratorPolynomial generatorPoly)\n\t{\n\t\tint dataLength = dataBytes.Length;\n if (generatorPoly == null)\n throw new ArgumentNullException(nameof(generatorPoly));\n\n if (dataLength == 0)\n\t\t{\n\t\t\tthrow new ArgumentException(\"There is no data bytes to encode.\");\n\t\t}\n\n\t\tif (numECBytes <= 0)\n\t\t{\n\t\t\tthrow new ArgumentException(\"No Error Correction 
bytes.\");\n\t\t}\n\n\t\tint[] toEncode = ConvertToIntArray(dataBytes, dataLength, numECBytes);\n\n\t\tPolynomial generator = generatorPoly.GetGenerator(numECBytes);\n\n\t\tPolynomial dataPoly = new(generator.GField, toEncode);\n\n\t\tPolyDivideStruct divideResult = dataPoly.Divide(generator);\n\n\t\tint[] remainderCoeffs = divideResult.Remainder.Coefficients;\n\n\t\treturn ConvertTosByteArray(remainderCoeffs, numECBytes);\n\t}", "docstring": "/// ", "url": "https://github.com/AvaloniaUI/Avalonia.Labs/blob/81ee313436d2305f7c169e0a099fe84d33d50624/src/Avalonia.Labs.Qr/Encoding/ReedSolomon/ReedSolomonEncoder.cs#L14-L41", "sha": "81ee313436d2305f7c169e0a099fe84d33d50624"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UrlUtility.AppendQuerytoUrl", "code": "public static void AppendQuerytoUrl(ref string url, string parameterName, string query) {\n if (query == null) {\n return;\n }\n url += parameterName + \"=\" + query + \"&\";\n }", "docstring": "/// ", "url": "https://github.com/julienkay/com.doji.genesis/blob/696728293c8ccf78eb5114f58dfcac2516ce1c70/Packages/com.doji.genesis/Runtime/Scripts/Utils/UrlUtility.cs#L11-L16", "sha": "696728293c8ccf78eb5114f58dfcac2516ce1c70"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OpenAIAssistant_Streaming.InvokeAgentAsync", "code": "private async Task InvokeAgentAsync(OpenAIAssistantAgent agent, string threadId, string input)\n {\n ChatMessageContent message = new(AuthorRole.User, input);\n await agent.AddChatMessageAsync(threadId, message);\n this.WriteAgentChatMessage(message);\n\n ChatHistory history = [];\n\n bool isFirst = false;\n bool isCode = false;\n await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(threadId, messages: history))\n {\n if (string.IsNullOrEmpty(response.Content))\n {\n StreamingFunctionCallUpdateContent? functionCall = response.Items.OfType().SingleOrDefault();\n if (functionCall != null)\n {\n Console.WriteLine($\"\\n# {response.Role} - {response.AuthorName ?? \"*\"}: FUNCTION CALL - {functionCall.Name}\");\n }\n\n continue;\n }\n\n // Differentiate between assistant and tool messages\n if (isCode != (response.Metadata?.ContainsKey(OpenAIAssistantAgent.CodeInterpreterMetadataKey) ?? false))\n {\n isFirst = false;\n isCode = !isCode;\n }\n\n if (!isFirst)\n {\n Console.WriteLine($\"\\n# {response.Role} - {response.AuthorName ?? \"*\"}:\");\n isFirst = true;\n }\n\n Console.WriteLine($\"\\t > streamed: '{response.Content}'\");\n }\n\n foreach (ChatMessageContent content in history)\n {\n this.WriteAgentChatMessage(content);\n }\n }", "docstring": "// Local function to invoke agent and display the conversation messages.", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs#L109-L152", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ChatHistoryExtensions.GetSystemMessage", "code": "internal static ChatMessageContent? 
GetSystemMessage(this IReadOnlyList<ChatMessageContent> chatHistory)\n {\n return chatHistory.FirstOrDefault(m => m.Role == AuthorRole.System);\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/samples/Concepts/ChatCompletion/ChatHistoryReducers/ChatHistoryExtensions.cs#L22-L25", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AutoFunctionInvocationFiltering.AutoFunctionInvocationFilterAsync", "code": "[Fact]\n public async Task AutoFunctionInvocationFilterAsync()\n {\n var builder = Kernel.CreateBuilder();\n\n builder.AddOpenAIChatCompletion(\"gpt-4\", TestConfiguration.OpenAI.ApiKey);\n\n // This filter outputs information about auto function invocation and returns overridden result.\n builder.Services.AddSingleton<IAutoFunctionInvocationFilter>(new AutoFunctionInvocationFilter(this.Output));\n\n var kernel = builder.Build();\n\n var function = KernelFunctionFactory.CreateFromMethod(() => \"Result from function\", \"MyFunction\");\n\n kernel.ImportPluginFromFunctions(\"MyPlugin\", [function]);\n\n var executionSettings = new OpenAIPromptExecutionSettings\n {\n FunctionChoiceBehavior = FunctionChoiceBehavior.Required([function], autoInvoke: true)\n };\n\n var result = await kernel.InvokePromptAsync(\"Invoke provided function and return result\", new(executionSettings));\n\n Console.WriteLine(result);\n\n // Output:\n // Request sequence number: 0\n // Function sequence number: 0\n // Total number of functions: 1\n // Result from auto function invocation filter.\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/samples/Concepts/Filtering/AutoFunctionInvocationFiltering.cs#L14-L44", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MethodFunctions_Advanced.MethodFunctionsChainingAsync", "code": "[Fact]\n public async Task MethodFunctionsChainingAsync()\n {\n Console.WriteLine(\"Running Method Function Chaining example...\");\n\n var kernel = new Kernel();\n\n var functions = kernel.ImportPluginFromType();\n\n var customType = await kernel.InvokeAsync<CustomType>(functions[\"Function1\"]);\n\n Console.WriteLine($\"CustomType.Number: {customType!.Number}\"); // 2\n Console.WriteLine($\"CustomType.Text: {customType.Text}\"); // From Function1 + From Function2\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/samples/Concepts/Functions/MethodFunctions_Advanced.cs#L18-L31", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "VectorStore_VectorSearch_MultiVector.CreateProductRecords", "code": "private static IEnumerable<Product> CreateProductRecords()\n {\n yield return new Product\n {\n Key = 1,\n Description = \"Premium coffee maker that allows you to make up to 20 types of drinks with one machine.\",\n FeatureList = [\"Milk Frother\", \"Easy to use\", \"One button operation\", \"Stylish design\"]\n };\n\n yield return new Product\n {\n Key = 2,\n Description = \"Value coffee maker that gives you what you need at a good price.\",\n FeatureList = [\"Simple design\", \"Easy to clean\"]\n };\n\n yield return new Product\n {\n Key = 3,\n Description = \"Efficient vacuum cleaner\",\n FeatureList = [\"1000W power\", \"Hard floor tool\", \"Bagless\", \"Corded\"]\n };\n\n yield 
return new Product\n {\n Key = 4,\n Description = \"High performance handheld vacuum cleaner\",\n FeatureList = [\"Pet hair tool\", \"2000W power\", \"Hard floor tool\", \"Bagless\", \"Cordless\"]\n };\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/samples/Concepts/Memory/VectorStore_VectorSearch_MultiVector.cs#L92-L121", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LegacyMenuPlugin.GetSpecials", "code": "[KernelFunction, Description(\"Provides a list of specials from the menu.\")]\n [System.Diagnostics.CodeAnalysis.SuppressMessage(\"Design\", \"CA1024:Use properties where appropriate\", Justification = \"Too smart\")]\n public string[] GetSpecials(KernelArguments? arguments)\n {\n return\n [\n \"Special Soup: Clam Chowder\",\n \"Special Salad: Cobb Salad\",\n \"Special Drink: Chai Tea\",\n ];\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs#L13-L23", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Step7_Observability.ObservabilityWithFiltersAsync", "code": "[Fact]\n public async Task ObservabilityWithFiltersAsync()\n {\n // Create a kernel with OpenAI chat completion\n IKernelBuilder kernelBuilder = Kernel.CreateBuilder();\n kernelBuilder.AddOpenAIChatCompletion(\n modelId: TestConfiguration.OpenAI.ChatModelId,\n apiKey: TestConfiguration.OpenAI.ApiKey);\n\n kernelBuilder.Plugins.AddFromType();\n\n // Add filter using DI\n kernelBuilder.Services.AddSingleton(this.Output);\n kernelBuilder.Services.AddSingleton();\n\n Kernel kernel = kernelBuilder.Build();\n\n // Add filter without DI\n kernel.PromptRenderFilters.Add(new MyPromptFilter(this.Output));\n\n // Invoke the kernel with a prompt and allow the AI to automatically invoke functions\n OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() };\n Console.WriteLine(await kernel.InvokePromptAsync(\"How many days until Christmas? Explain your thinking.\", new(settings)));\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/samples/GettingStarted/Step7_Observability.cs#L15-L38", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LearnBaseTest.ReadLine", "code": "public string? 
ReadLine()\n {\n if (SimulatedInputTextIndex < SimulatedInputText.Count)\n {\n return SimulatedInputText[SimulatedInputTextIndex++];\n }\n\n return null;\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/samples/LearnResources/MicrosoftLearn/LearnBaseTest.cs#L23-L31", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OpenAIAssistantChannel.ReceiveAsync", "code": "protected override async Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken)\n {\n foreach (ChatMessageContent message in history)\n {\n await AssistantThreadActions.CreateMessageAsync(this._client, this._threadId, message, cancellationToken).ConfigureAwait(false);\n }\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs#L20-L26", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OpenAIAssistantAgentLogMessages.LogOpenAIAssistantAgentCreatingChannel", "code": "[LoggerMessage(\n EventId = 0,\n Level = LogLevel.Debug,\n Message = \"[{MethodName}] Creating assistant thread for {ChannelType}.\")]\n public static partial void LogOpenAIAssistantAgentCreatingChannel(\n this ILogger logger,\n string methodName,\n string channelType);", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Agents/OpenAI/Logging/OpenAIAssistantAgentLogMessages.cs#L22-L29", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AssistantMessageFactoryTests.VerifyAssistantMessageAdapterCreateOptionsDefault", "code": "[Fact]\n public void VerifyAssistantMessageAdapterCreateOptionsDefault()\n {\n // Arrange (Setup message with null metadata)\n ChatMessageContent message = new(AuthorRole.User, \"test\");\n\n // Act: Create options\n MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message);\n\n // Assert\n Assert.NotNull(options);\n Assert.Empty(options.Metadata);\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantMessageFactoryTests.cs#L21-L33", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HuggingFaceServiceCollectionExtensions.AddHuggingFaceTextGeneration", "code": "public static IServiceCollection AddHuggingFaceTextGeneration(\n this IServiceCollection services,\n string model,\n Uri? endpoint = null,\n string? apiKey = null,\n string? serviceId = null,\n HttpClient? 
httpClient = null)\n {\n Verify.NotNull(services);\n\n return services.AddKeyedSingleton<ITextGenerationService>(serviceId, (serviceProvider, _) =>\n new HuggingFaceTextGenerationService(\n model,\n endpoint,\n apiKey,\n HttpClientProvider.GetHttpClient(httpClient, serviceProvider),\n serviceProvider.GetService<ILoggerFactory>()));\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Connectors/Connectors.HuggingFace/HuggingFaceServiceCollectionExtensions.cs#L31-L48", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PineconeServiceCollectionExtensions.AddPineconeVectorStore", "code": "public static IServiceCollection AddPineconeVectorStore(this IServiceCollection services, PineconeVectorStoreOptions? options = default, string? serviceId = default)\n {\n // If we are not constructing the PineconeClient, add the IVectorStore as transient, since we\n // cannot make assumptions about how PineconeClient is being managed.\n services.AddKeyedTransient<IVectorStore>(\n serviceId,\n (sp, obj) =>\n {\n var pineconeClient = sp.GetRequiredService<PineconeClient>();\n var selectedOptions = options ?? sp.GetService<PineconeVectorStoreOptions>();\n\n return new PineconeVectorStore(\n pineconeClient,\n selectedOptions);\n });\n\n return services;\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeServiceCollectionExtensions.cs#L22-L39", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PostgresVectorStoreUtils.WrapAsyncEnumerableAsync", "code": "public static async IAsyncEnumerable<T> WrapAsyncEnumerableAsync<T>(IAsyncEnumerable<T> asyncEnumerable, string operationName, string? 
collectionName = null)\n {\n var enumerator = asyncEnumerable.ConfigureAwait(false).GetAsyncEnumerator();\n\n var nextResult = await GetNextAsync(enumerator, operationName, collectionName).ConfigureAwait(false);\n while (nextResult.more)\n {\n yield return nextResult.item;\n nextResult = await GetNextAsync(enumerator, operationName, collectionName).ConfigureAwait(false);\n }\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Connectors/Connectors.Memory.Postgres/PostgresVectorStoreUtils.cs#L22-L32", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WeaviateVectorStoreCollectionCreateMapping.MapToSchema", "code": "public static WeaviateCollectionSchema MapToSchema(\n string collectionName,\n IEnumerable dataProperties,\n IEnumerable vectorProperties,\n IReadOnlyDictionary storagePropertyNames)\n {\n var schema = new WeaviateCollectionSchema(collectionName);\n\n // Handle data properties.\n foreach (var property in dataProperties)\n {\n schema.Properties.Add(new WeaviateCollectionSchemaProperty\n {\n Name = storagePropertyNames[property.DataModelPropertyName],\n DataType = [MapType(property.PropertyType)],\n IndexFilterable = property.IsFilterable,\n IndexSearchable = property.IsFullTextSearchable\n });\n }\n\n // Handle vector properties.\n foreach (var property in vectorProperties)\n {\n var vectorPropertyName = storagePropertyNames[property.DataModelPropertyName];\n schema.VectorConfigurations.Add(vectorPropertyName, new WeaviateCollectionSchemaVectorConfig\n {\n VectorIndexType = MapIndexKind(property.IndexKind, vectorPropertyName),\n VectorIndexConfig = new WeaviateCollectionSchemaVectorIndexConfig\n {\n Distance = MapDistanceFunction(property.DistanceFunction, vectorPropertyName)\n }\n });\n }\n\n return schema;\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs#L25-L60", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OnnxRuntimeGenAIPromptExecutionSettings.FromExecutionSettings", "code": "[RequiresUnreferencedCode(\"This method uses reflection to serialize and deserialize the execution settings, making it incompatible with AOT scenarios.\")]\n [RequiresDynamicCode(\"This method uses reflection to serialize and deserialize the execution settings, making it incompatible with AOT scenarios.\")]\n public static OnnxRuntimeGenAIPromptExecutionSettings FromExecutionSettings(PromptExecutionSettings? 
executionSettings)\n {\n if (executionSettings is null)\n {\n return new OnnxRuntimeGenAIPromptExecutionSettings();\n }\n\n if (executionSettings is OnnxRuntimeGenAIPromptExecutionSettings settings)\n {\n return settings;\n }\n\n var json = JsonSerializer.Serialize(executionSettings, executionSettings.GetType());\n\n return JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive)!;\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIPromptExecutionSettings.cs#L22-L39", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QdrantVectorStoreRecordCollectionTests.CanCreateCollectionWithMismatchedDefinitionAndType", "code": "[Fact]\n public void CanCreateCollectionWithMismatchedDefinitionAndType()\n {\n // Arrange.\n var definition = new VectorStoreRecordDefinition()\n {\n Properties = new List\n {\n new VectorStoreRecordKeyProperty(\"Id\", typeof(ulong)),\n new VectorStoreRecordDataProperty(\"Text\", typeof(string)),\n new VectorStoreRecordVectorProperty(\"Embedding\", typeof(ReadOnlyMemory)) { Dimensions = 4 },\n }\n };\n\n // Act.\n var sut = new QdrantVectorStoreRecordCollection>(\n this._qdrantClientMock.Object,\n TestCollectionName,\n new() { VectorStoreRecordDefinition = definition, PointStructCustomMapper = Mock.Of, PointStruct>>() });\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordCollectionTests.cs#L530-L549", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PromptTemplateConfigExtensions.SetMaxTokens", "code": "internal static void SetMaxTokens(this PromptTemplateConfig config, int maxTokens)\n {\n var executionSettings = config.ExecutionSettings;\n foreach (var setting in executionSettings)\n {\n if (setting.Value.ExtensionData is not null)\n {\n setting.Value.ExtensionData[\"max_tokens\"] = maxTokens;\n }\n }\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Experimental/Orchestration.Flow/Extensions/PromptTemplateConfigExtensions.cs#L15-L25", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DaprKernelProcessFactory.StartAsync", "code": "public static async Task StartAsync(this KernelProcess process, KernelProcessEvent initialEvent, string? 
processId = null)\n {\n Verify.NotNull(process);\n Verify.NotNullOrWhiteSpace(process.State?.Name);\n Verify.NotNull(initialEvent);\n\n // Assign the process Id if one is provided and the process does not already have an Id.\n if (!string.IsNullOrWhiteSpace(processId) && string.IsNullOrWhiteSpace(process.State.Id))\n {\n process = process with { State = process.State with { Id = processId } };\n }\n\n DaprKernelProcessContext processContext = new(process);\n await processContext.StartWithEventAsync(initialEvent).ConfigureAwait(false);\n return processContext;\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Experimental/Process.Runtime.Dapr/DaprKernelProcessFactory.cs#L18-L33", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "XunitLogger.Log", "code": "public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)\n {\n this._output.WriteLine(state?.ToString());\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/IntegrationTests/XunitLogger.cs#L17-L20", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MixedAgentTests.OpenAIMixedAgentTestAsync", "code": "[Theory(Skip = \"OpenAI will often throttle requests. This test is for manual verification.\")]\n [InlineData(false)]\n [InlineData(true)]\n public async Task OpenAIMixedAgentTestAsync(bool useNewFunctionCallingModel)\n {\n OpenAIConfiguration openAISettings = this._configuration.GetSection(\"OpenAI\").Get<OpenAIConfiguration>()!;\n Assert.NotNull(openAISettings);\n\n // Arrange, Act & Assert\n await this.VerifyAgentExecutionAsync(\n this.CreateChatCompletionKernel(openAISettings),\n OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(openAISettings.ApiKey)),\n openAISettings.ChatModelId!,\n useNewFunctionCallingModel);\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs#L36-L50", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QdrantTextSearchTests.CreateTextSearchAsync", "code": "public override Task CreateTextSearchAsync()\n {\n if (this.VectorStore is null)\n {\n this.EmbeddingGenerator = fixture.EmbeddingGenerator;\n this.VectorStore = new QdrantVectorStore(fixture.QdrantClient);\n }\n\n var options = new QdrantVectorStoreRecordCollectionOptions\n {\n HasNamedVectors = true,\n VectorStoreRecordDefinition = fixture.HotelVectorStoreRecordDefinition,\n };\n var vectorSearch = new QdrantVectorStoreRecordCollection(fixture.QdrantClient, \"namedVectorsHotels\", options);\n var stringMapper = new HotelInfoTextSearchStringMapper();\n var resultMapper = new HotelInfoTextSearchResultMapper();\n\n var result = new VectorStoreTextSearch(vectorSearch, this.EmbeddingGenerator!, stringMapper, resultMapper);\n return Task.FromResult(result);\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantTextSearchTests.cs#L20-L39", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", 
"func_name": "ClientResultExceptionExtensions.ToHttpOperationException", "code": "public static HttpOperationException ToHttpOperationException(this ClientResultException exception)\n {\n const int NoResponseReceived = 0;\n\n string? responseContent = null;\n\n try\n {\n responseContent = exception.GetRawResponse()?.Content.ToString();\n }\n#pragma warning disable CA1031 // Do not catch general exception types\n catch { } // We want to suppress any exceptions that occur while reading the content, ensuring that an HttpOperationException is thrown instead.\n#pragma warning restore CA1031\n\n return new HttpOperationException(\n exception.Status == NoResponseReceived ? null : (HttpStatusCode?)exception.Status,\n responseContent,\n exception.Message,\n exception);\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/InternalUtilities/openai/Extensions/ClientResultExceptionExtensions.cs#L19-L38", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ProcessStateMetadataFactory.KernelProcessToProcessStateMetadata", "code": "public static KernelProcessStateMetadata KernelProcessToProcessStateMetadata(KernelProcess kernelProcess)\n {\n KernelProcessStateMetadata metadata = new()\n {\n Name = kernelProcess.State.Name,\n Id = kernelProcess.State.Id,\n VersionInfo = kernelProcess.State.Version,\n StepsState = [],\n };\n\n foreach (KernelProcessStepInfo step in kernelProcess.Steps)\n {\n metadata.StepsState.Add(step.State.Name, step.ToProcessStateMetadata());\n }\n\n return metadata;\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/InternalUtilities/process/Abstractions/KernelProcessStateMetadataFactory.cs#L14-L30", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Verify.NotNull", "code": "[MethodImpl(MethodImplOptions.AggressiveInlining)]\n internal static void NotNull([NotNull] object? obj, [CallerArgumentExpression(nameof(obj))] string? 
paramName = null)\n {\n#if NET\n ArgumentNullException.ThrowIfNull(obj, paramName);\n#else\n if (obj is null)\n {\n ThrowArgumentNullException(paramName);\n }\n#endif\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/InternalUtilities/src/Diagnostics/Verify.cs#L33-L44", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MathPlugin.Add", "code": "[KernelFunction, Description(\"Adds an amount to a value\")]\n [return: Description(\"The sum\")]\n public int Add(\n [Description(\"The value to add\")] int value,\n [Description(\"Amount to add\")] int amount) =>\n value + amount;", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Plugins/Plugins.Core/MathPlugin.cs#L18-L23", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GoogleTextSearch.DefaultTextSearchResultMapper.MapFromResultToTextSearchResult", "code": "public TextSearchResult MapFromResultToTextSearchResult(object result)\n {\n if (result is not global::Google.Apis.CustomSearchAPI.v1.Data.Result googleResult)\n {\n throw new ArgumentException(\"Result must be a Google Result\", nameof(result));\n }\n\n return new TextSearchResult(googleResult.Snippet) { Name = googleResult.Title, Link = googleResult.Link };\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/Plugins/Plugins.Web/Google/GoogleTextSearch.cs#L288-L296", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TextGenerationExtensions.GetTextContentAsync", "code": "public static async Task GetTextContentAsync(\n this ITextGenerationService textGenerationService,\n string prompt,\n PromptExecutionSettings? executionSettings = null,\n Kernel? kernel = null,\n CancellationToken cancellationToken = default)\n => (await textGenerationService.GetTextContentsAsync(prompt, executionSettings, kernel, cancellationToken).ConfigureAwait(false))\n .Single();", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/TextGenerationExtensions.cs#L27-L34", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RestApiOperationResponseConverter.CanConvertTo", "code": "public override bool CanConvertTo(ITypeDescriptorContext? context, Type? destinationType)\n {\n return destinationType == typeof(string) || base.CanConvertTo(context, destinationType);\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/SemanticKernel.Abstractions/Functions/RestApiOperationResponseConverter.cs#L15-L18", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PromptTemplateFactoryExtensions.Create", "code": "public static IPromptTemplate Create(this IPromptTemplateFactory factory, PromptTemplateConfig templateConfig)\n {\n Verify.NotNull(factory);\n Verify.NotNull(templateConfig);\n\n if (!factory.TryCreate(templateConfig, out IPromptTemplate? 
result))\n {\n throw new KernelException($\"Prompt template format {templateConfig.TemplateFormat} is not supported.\");\n }\n\n return result;\n }", "docstring": "/// ", "url": "https://github.com/microsoft/semantic-kernel/blob/cd84e877980187e62d86bb5bc6086d264e62ee83/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateFactoryExtensions.cs#L17-L28", "sha": "cd84e877980187e62d86bb5bc6086d264e62ee83"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IEnumerableExtensions.HasExactly", "code": "public static bool HasExactly(this IEnumerable source, int count)\n {\n if (source is null)\n {\n throw new ArgumentNullException(nameof(source));\n }\n\n if (source is ICollection collectionoft)\n {\n return collectionoft.Count == count;\n }\n\n if (source is ICollection collection)\n {\n return collection.Count == count;\n }\n\n using var enumerator = source.GetEnumerator();\n while (count-- > 0)\n {\n if (!enumerator.MoveNext())\n {\n return false;\n }\n }\n\n return !enumerator.MoveNext();\n }", "docstring": "/// ", "url": "https://github.com/SteveDunn/Intellenum/blob/d3cdb8826f3af62856a41a6974c76a7c5288f326/src/Intellenum/Extensions/IEnumerableExtensions.cs#L110-L137", "sha": "d3cdb8826f3af62856a41a6974c76a7c5288f326"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "initial.BuildTargetModel", "code": "protected override void BuildTargetModel(ModelBuilder modelBuilder)\n {\n#pragma warning disable 612, 618\n modelBuilder\n .HasAnnotation(\"ProductVersion\", \"8.0.0-preview.6.23329.4\")\n .HasAnnotation(\"Relational:MaxIdentifierLength\", 128);\n\n SqlServerModelBuilderExtensions.UseIdentityColumns(modelBuilder);\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityRole\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"int\");\n\n SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property(\"Id\"));\n\n b.Property(\"ConcurrencyStamp\")\n .IsConcurrencyToken()\n .HasColumnType(\"nvarchar(max)\");\n\n b.Property(\"Name\")\n .HasMaxLength(256)\n .HasColumnType(\"nvarchar(256)\");\n\n b.Property(\"NormalizedName\")\n .HasMaxLength(256)\n .HasColumnType(\"nvarchar(256)\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"NormalizedName\")\n .IsUnique()\n .HasDatabaseName(\"RoleNameIndex\")\n .HasFilter(\"[NormalizedName] IS NOT NULL\");\n\n b.ToTable(\"AspNetRoles\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityRoleClaim\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"int\");\n\n SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property(\"Id\"));\n\n b.Property(\"ClaimType\")\n .HasColumnType(\"nvarchar(max)\");\n\n b.Property(\"ClaimValue\")\n .HasColumnType(\"nvarchar(max)\");\n\n b.Property(\"RoleId\")\n .HasColumnType(\"int\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"RoleId\");\n\n b.ToTable(\"AspNetRoleClaims\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUser\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"int\");\n\n SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property(\"Id\"));\n\n b.Property(\"AccessFailedCount\")\n .HasColumnType(\"int\");\n\n b.Property(\"ConcurrencyStamp\")\n .IsConcurrencyToken()\n .HasColumnType(\"nvarchar(max)\");\n\n b.Property(\"Email\")\n .HasMaxLength(256)\n .HasColumnType(\"nvarchar(256)\");\n\n b.Property(\"EmailConfirmed\")\n .HasColumnType(\"bit\");\n\n b.Property(\"LockoutEnabled\")\n 
.HasColumnType(\"bit\");\n\n b.Property(\"LockoutEnd\")\n .HasColumnType(\"datetimeoffset\");\n\n b.Property(\"NormalizedEmail\")\n .HasMaxLength(256)\n .HasColumnType(\"nvarchar(256)\");\n\n b.Property(\"NormalizedUserName\")\n .HasMaxLength(256)\n .HasColumnType(\"nvarchar(256)\");\n\n b.Property(\"PasswordHash\")\n .HasColumnType(\"nvarchar(max)\");\n\n b.Property(\"PhoneNumber\")\n .HasColumnType(\"nvarchar(max)\");\n\n b.Property(\"PhoneNumberConfirmed\")\n .HasColumnType(\"bit\");\n\n b.Property(\"SecurityStamp\")\n .HasColumnType(\"nvarchar(max)\");\n\n b.Property(\"TwoFactorEnabled\")\n .HasColumnType(\"bit\");\n\n b.Property(\"UserName\")\n .HasMaxLength(256)\n .HasColumnType(\"nvarchar(256)\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"NormalizedEmail\")\n .HasDatabaseName(\"EmailIndex\");\n\n b.HasIndex(\"NormalizedUserName\")\n .IsUnique()\n .HasDatabaseName(\"UserNameIndex\")\n .HasFilter(\"[NormalizedUserName] IS NOT NULL\");\n\n b.ToTable(\"AspNetUsers\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserClaim\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"int\");\n\n SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property(\"Id\"));\n\n b.Property(\"ClaimType\")\n .HasColumnType(\"nvarchar(max)\");\n\n b.Property(\"ClaimValue\")\n .HasColumnType(\"nvarchar(max)\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"int\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"AspNetUserClaims\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserLogin\", b =>\n {\n b.Property(\"LoginProvider\")\n .HasColumnType(\"nvarchar(450)\");\n\n b.Property(\"ProviderKey\")\n .HasColumnType(\"nvarchar(450)\");\n\n b.Property(\"ProviderDisplayName\")\n .HasColumnType(\"nvarchar(max)\");\n\n b.Property(\"UserId\")\n .HasColumnType(\"int\");\n\n b.HasKey(\"LoginProvider\", \"ProviderKey\");\n\n b.HasIndex(\"UserId\");\n\n b.ToTable(\"AspNetUserLogins\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserRole\", b =>\n {\n b.Property(\"UserId\")\n .HasColumnType(\"int\");\n\n b.Property(\"RoleId\")\n .HasColumnType(\"int\");\n\n b.HasKey(\"UserId\", \"RoleId\");\n\n b.HasIndex(\"RoleId\");\n\n b.ToTable(\"AspNetUserRoles\", (string)null);\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserToken\", b =>\n {\n b.Property(\"UserId\")\n .HasColumnType(\"int\");\n\n b.Property(\"LoginProvider\")\n .HasColumnType(\"nvarchar(450)\");\n\n b.Property(\"Name\")\n .HasColumnType(\"nvarchar(450)\");\n\n b.Property(\"Value\")\n .HasColumnType(\"nvarchar(max)\");\n\n b.HasKey(\"UserId\", \"LoginProvider\", \"Name\");\n\n b.ToTable(\"AspNetUserTokens\", (string)null);\n });\n\n modelBuilder.Entity(\"PackagesManagementDB.Models.Destination\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"int\");\n\n SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property(\"Id\"));\n\n b.Property(\"Country\")\n .IsRequired()\n .HasMaxLength(128)\n .HasColumnType(\"nvarchar(128)\");\n\n b.Property(\"Description\")\n .HasColumnType(\"nvarchar(max)\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasMaxLength(128)\n .HasColumnType(\"nvarchar(128)\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"Country\");\n\n b.HasIndex(\"Name\");\n\n b.ToTable(\"Destinations\");\n });\n\n modelBuilder.Entity(\"PackagesManagementDB.Models.Package\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"int\");\n\n 
SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property(\"Id\"));\n\n b.Property(\"Description\")\n .HasMaxLength(128)\n .HasColumnType(\"nvarchar(128)\");\n\n b.Property(\"DestinationId\")\n .HasColumnType(\"int\");\n\n b.Property(\"DurationInDays\")\n .HasColumnType(\"int\");\n\n b.Property(\"EndValidityDate\")\n .HasColumnType(\"datetime2\");\n\n b.Property(\"EntityVersion\")\n .IsConcurrencyToken()\n .HasColumnType(\"bigint\");\n\n b.Property(\"Name\")\n .IsRequired()\n .HasMaxLength(128)\n .HasColumnType(\"nvarchar(128)\");\n\n b.Property(\"Price\")\n .HasColumnType(\"decimal(18,2)\");\n\n b.Property(\"StartValidityDate\")\n .HasColumnType(\"datetime2\");\n\n b.HasKey(\"Id\");\n\n b.HasIndex(\"DestinationId\");\n\n b.HasIndex(\"Name\");\n\n b.HasIndex(\"StartValidityDate\", \"EndValidityDate\");\n\n b.ToTable(\"Packages\");\n });\n\n modelBuilder.Entity(\"PackagesManagementDB.Models.PackageEvent\", b =>\n {\n b.Property(\"Id\")\n .ValueGeneratedOnAdd()\n .HasColumnType(\"bigint\");\n\n SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property(\"Id\"));\n\n b.Property(\"NewPrice\")\n .HasColumnType(\"decimal(18,2)\");\n\n b.Property(\"NewVersion\")\n .HasColumnType(\"bigint\");\n\n b.Property(\"OldVersion\")\n .HasColumnType(\"bigint\");\n\n b.Property(\"PackageId\")\n .HasColumnType(\"int\");\n\n b.Property(\"Type\")\n .HasColumnType(\"int\");\n\n b.HasKey(\"Id\");\n\n b.ToTable(\"PackageEvents\");\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityRoleClaim\", b =>\n {\n b.HasOne(\"Microsoft.AspNetCore.Identity.IdentityRole\", null)\n .WithMany()\n .HasForeignKey(\"RoleId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserClaim\", b =>\n {\n b.HasOne(\"Microsoft.AspNetCore.Identity.IdentityUser\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserLogin\", b =>\n {\n b.HasOne(\"Microsoft.AspNetCore.Identity.IdentityUser\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserRole\", b =>\n {\n b.HasOne(\"Microsoft.AspNetCore.Identity.IdentityRole\", null)\n .WithMany()\n .HasForeignKey(\"RoleId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.HasOne(\"Microsoft.AspNetCore.Identity.IdentityUser\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"Microsoft.AspNetCore.Identity.IdentityUserToken\", b =>\n {\n b.HasOne(\"Microsoft.AspNetCore.Identity.IdentityUser\", null)\n .WithMany()\n .HasForeignKey(\"UserId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n });\n\n modelBuilder.Entity(\"PackagesManagementDB.Models.Package\", b =>\n {\n b.HasOne(\"PackagesManagementDB.Models.Destination\", \"MyDestination\")\n .WithMany(\"Packages\")\n .HasForeignKey(\"DestinationId\")\n .OnDelete(DeleteBehavior.Cascade)\n .IsRequired();\n\n b.Navigation(\"MyDestination\");\n });\n\n modelBuilder.Entity(\"PackagesManagementDB.Models.Destination\", b =>\n {\n b.Navigation(\"Packages\");\n });\n#pragma warning restore 612, 618\n }", "docstring": "/// ", "url": 
"https://github.com/PacktPublishing/Software-Architecture-with-C-Sharp-12-and-.NET-8-4E/blob/bb5f06a93c7a6ca281f88b0e3e16fb1302ebf943/ch09/PackagesManagementWithTests/PackagesManagementDB/Migrations/20230717141854_initial.Designer.cs#L19-L401", "sha": "bb5f06a93c7a6ca281f88b0e3e16fb1302ebf943"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Benchmarks.Clr.DynamicCallDotnetMethod", "code": "[Benchmark]\n public void DynamicCallDotnetMethod()\n {\n _jsFunctionCallMethodDynamic.CallAsStatic(_jsHost);\n }", "docstring": "// CLR-only (non-AOT) benchmarks", "url": "https://github.com/microsoft/node-api-dotnet/blob/956d1b14426f3c41c3464d60544c5ee486241d3f/bench/Benchmarks.cs#L263-L267", "sha": "956d1b14426f3c41c3464d60544c5ee486241d3f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NodeWorker.MessagePort.Unref", "code": "public void Unref()\n {\n _portRef.Run((port) => port.CallMethod(\"unref\"));\n }", "docstring": "/// ", "url": "https://github.com/microsoft/node-api-dotnet/blob/956d1b14426f3c41c3464d60544c5ee486241d3f/src/NodeApi/Interop/NodeWorker.cs#L299-L302", "sha": "956d1b14426f3c41c3464d60544c5ee486241d3f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EnumPolyfill.GetValues", "code": "public static TEnum[] GetValues()\n where TEnum : struct, Enum\n {\n#if NETCOREAPPX || NETFRAMEWORK || NETSTANDARD\n var values = Enum.GetValues(typeof(TEnum));\n var result = new TEnum[values.Length];\n Array.Copy(values, result, values.Length);\n return result;\n#else\n return Enum.GetValues();\n#endif\n }", "docstring": "/// ", "url": "https://github.com/SimonCropp/Polyfill/blob/b3fc2b19ec553e6971f8bb688d0116c8b61f6788/src/Polyfill/EnumPolyfill.cs#L23-L34", "sha": "b3fc2b19ec553e6971f8bb688d0116c8b61f6788"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Polyfill.GetStreamAsync", "code": "public static async Task GetStreamAsync(\n this HttpClient target,\n string requestUri,\n CancellationToken cancellationToken = default)\n {\n try\n {\n // Must not be disposed for the stream to be usable\n var response = await target.GetAsync(\n requestUri,\n HttpCompletionOption.ResponseHeadersRead,\n cancellationToken\n ).ConfigureAwait(false);\n\n response.EnsureSuccessStatusCode();\n\n return await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);\n }\n // Older versions of HttpClient methods don't propagate the cancellation token inside the exception\n catch (OperationCanceledException ex) when (\n ex.CancellationToken != cancellationToken &&\n cancellationToken.IsCancellationRequested)\n {\n throw new OperationCanceledException(ex.Message, ex.InnerException, cancellationToken);\n }\n }", "docstring": "/// ", "url": "https://github.com/SimonCropp/Polyfill/blob/b3fc2b19ec553e6971f8bb688d0116c8b61f6788/src/Polyfill/Polyfill_HttpClient.cs#L23-L48", "sha": "b3fc2b19ec553e6971f8bb688d0116c8b61f6788"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Polyfill.SaveAsync", "code": "public static Task SaveAsync(\n this XDocument target,\n XmlWriter writer,\n CancellationToken cancellationToken)\n {\n cancellationToken.ThrowIfCancellationRequested();\n target.Save(writer);\n return Task.CompletedTask;\n }", "docstring": "/// ", "url": "https://github.com/SimonCropp/Polyfill/blob/b3fc2b19ec553e6971f8bb688d0116c8b61f6788/src/Polyfill/Polyfill_XDocument.cs#L26-L34", "sha": "b3fc2b19ec553e6971f8bb688d0116c8b61f6788"} 
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OperatingSystemPolyfill.IsOSPlatform", "code": "public static bool IsOSPlatform(string platform) =>\n#if NET5_0_OR_GREATER\n OperatingSystem.IsOSPlatform(platform);\n#else\n RuntimeInformation.IsOSPlatform(OSPlatform.Create(platform));\n#endif\n\n /// \n /// Checks if the operating system version is greater than or equal to the specified platform version. This method can be used to guard APIs that were added in the specified OS version.\n /// \n /// The case-insensitive platform name. Examples: Browser, Linux, FreeBSD, Android, iOS, macOS, tvOS, watchOS, Windows.\n /// The major release number.\n /// The minor release number (optional).\n /// The build release number (optional).\n /// The revision release number (optional).\n /// true if the current application is running on the specified platform and is at least in the version specified in the parameters; false otherwise.\n ///Link: https://learn.microsoft.com/en-us/dotnet/api/system.operatingsystem.isosplatformversionatleast", "docstring": "/// ", "url": "https://github.com/SimonCropp/Polyfill/blob/b3fc2b19ec553e6971f8bb688d0116c8b61f6788/src/Polyfill/OperatingSystemPolyfill.cs#L52-L68", "sha": "b3fc2b19ec553e6971f8bb688d0116c8b61f6788", "code/function": "public static bool IsOSPlatform(string platform) =>\n#if NET5_0_OR_GREATER\n OperatingSystem.IsOSPlatform(platform);\n#else\n RuntimeInformation.IsOSPlatform(OSPlatform.Create(platform));\n#endif\n\n \n /// Checks if the operating system version is greater than or equal to the specified platform version. This method can be used to guard APIs that were added in the specified OS version.\n /// \n /// The case-insensitive platform name. Examples: Browser, Linux, FreeBSD, Android, iOS, macOS, tvOS, watchOS, Windows.\n /// The major release number.\n /// The minor release number (optional).\n /// The build release number (optional).\n /// The revision release number (optional).\n /// true if the current application is running on the specified platform and is at least in the version specified in the parameters; false otherwise.\n ///Link: https://learn.microsoft.com/en-us/dotnet/api/system.operatingsystem.isosplatformversionatleast"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UIntPolyfill.TryParse", "code": "public static bool TryParse(string? target, IFormatProvider? 
provider, out uint result) =>\n#if NET7_0_OR_GREATER\n uint.TryParse(target, provider, out result);\n#else\n uint.TryParse(target, NumberStyles.Integer, provider, out result);\n#endif", "docstring": "/// ", "url": "https://github.com/SimonCropp/Polyfill/blob/b3fc2b19ec553e6971f8bb688d0116c8b61f6788/src/Polyfill/Numbers/UIntPolyfill.cs#L23-L28", "sha": "b3fc2b19ec553e6971f8bb688d0116c8b61f6788"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CutFloatValueEventArgs.ReadXml", "code": "public override void ReadXml(XmlNode node)\n {\n base.ReadXml(node);\n fValue = Xml.GetChildFloatAttribute(node, \"fValue\", \"value\");\n }", "docstring": "// PsoDataType.Float, 32, 0, 0)", "url": "https://github.com/grzybeek/grzyClothTool/blob/4f0ddcfffadcf3fbf8a4529b084c90bf464a81b5/CodeWalker/CodeWalker.Core/GameFiles/FileTypes/CutFile.cs#L984-L988", "sha": "4f0ddcfffadcf3fbf8a4529b084c90bf464a81b5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CutVehicleExtraEventArgs.ReadXml", "code": "public override void ReadXml(XmlNode node)\n {\n base.ReadXml(node);\n pExtraBoneIds = Xml.GetChildRawIntArray(node, \"pExtraBoneIds\");\n }", "docstring": "// PsoDataType.Array, 40, 0, (MetaName)3)//ARRAYINFO, PsoDataType.SInt, 0, 0, 0),", "url": "https://github.com/grzybeek/grzyClothTool/blob/4f0ddcfffadcf3fbf8a4529b084c90bf464a81b5/CodeWalker/CodeWalker.Core/GameFiles/FileTypes/CutFile.cs#L1181-L1185", "sha": "4f0ddcfffadcf3fbf8a4529b084c90bf464a81b5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AnimSequence.EvaluateQuaternionType7", "code": "public Quaternion EvaluateQuaternionType7(int frame)\n {\n if (!IsType7Quat)\n {\n return new Quaternion(\n Channels[0].EvaluateFloat(frame),\n Channels[1].EvaluateFloat(frame),\n Channels[2].EvaluateFloat(frame),\n Channels[3].EvaluateFloat(frame)\n );\n }\n\n var t7 = Channels[3] as AnimChannelCachedQuaternion;//type 1\n if (t7 == null) t7 = Channels[4] as AnimChannelCachedQuaternion;//type 2\n\n var x = Channels[0].EvaluateFloat(frame);\n var y = Channels[1].EvaluateFloat(frame);\n var z = Channels[2].EvaluateFloat(frame);\n var normalized = t7.EvaluateFloat(frame);\n\n switch (t7.QuatIndex)\n {\n case 0:\n return new Quaternion(normalized, x, y, z);\n case 1:\n return new Quaternion(x, normalized, y, z);\n case 2:\n return new Quaternion(x, y, normalized, z);\n case 3:\n return new Quaternion(x, y, z, normalized);\n default:\n return Quaternion.Identity;\n }\n }", "docstring": "//for convenience", "url": "https://github.com/grzybeek/grzyClothTool/blob/4f0ddcfffadcf3fbf8a4529b084c90bf464a81b5/CodeWalker/CodeWalker.Core/GameFiles/Resources/Clip.cs#L2171-L2204", "sha": "4f0ddcfffadcf3fbf8a4529b084c90bf464a81b5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FbxIO.Read", "code": "public static FbxDocument Read(byte[] data)\n {\n using (var stream = new MemoryStream(data))\n {\n var isbinary = FbxBinary.IsBinary(stream);\n if (isbinary)\n {\n var reader = new FbxBinaryReader(stream);\n return reader.Read();\n }\n else //try ASCII\n {\n var reader = new FbxAsciiReader(stream);\n return reader.Read();\n }\n }\n }", "docstring": "/// ", "url": "https://github.com/grzybeek/grzyClothTool/blob/4f0ddcfffadcf3fbf8a4529b084c90bf464a81b5/CodeWalker/CodeWalker.Core/Utils/Fbx.cs#L35-L51", "sha": "4f0ddcfffadcf3fbf8a4529b084c90bf464a81b5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Program.Main", "code": 
"[STAThread]\n static void Main(string[] args)\n {\n //Application.SetHighDpiMode(HighDpiMode.SystemAware);\n Application.EnableVisualStyles();\n Application.SetCompatibleTextRenderingDefault(false);\n\n // Always check the GTA folder first thing\n if (!GTAFolder.UpdateGTAFolder(Properties.Settings.Default.RememberGTAFolder))\n {\n MessageBox.Show(\"Could not load CodeWalker because no valid GTA 5 folder was selected. CodeWalker will now exit.\", \"GTA 5 Folder Not Found\", MessageBoxButtons.OK, MessageBoxIcon.Stop);\n return;\n }\n#if !DEBUG\n try\n {\n#endif\n#if !DEBUG\n }\n catch (Exception ex)\n {\n MessageBox.Show(\"An unexpected error was encountered!\\n\" + ex.ToString());\n //this can happen if folder wasn't chosen, or in some other catastrophic error. meh.\n }\n#endif\n }", "docstring": "/// ", "url": "https://github.com/grzybeek/grzyClothTool/blob/4f0ddcfffadcf3fbf8a4529b084c90bf464a81b5/CodeWalker/CodeWalker/Program.cs#L18-L43", "sha": "4f0ddcfffadcf3fbf8a4529b084c90bf464a81b5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PromptGeneratorService.GeneratePromptFromDataset", "code": "public string GeneratePromptFromDataset(string[] tags, string prependTags, string appendTags, int amountOfTags)\n {\n _stringBuilder.Clear();\n\n if (!string.IsNullOrEmpty(prependTags))\n {\n _stringBuilder.Append(prependTags);\n _stringBuilder.Append(\", \");\n }\n\n for (int i = 0; i < amountOfTags; i++)\n {\n string tag = tags[_random.Next(tags.Length)];\n _stringBuilder.Append(tag);\n if (i != amountOfTags - 1)\n {\n _stringBuilder.Append(\", \");\n }\n }\n\n if (!string.IsNullOrEmpty(appendTags))\n {\n _stringBuilder.Append(\", \");\n _stringBuilder.Append(appendTags);\n }\n\n string prompt = _stringBuilder.ToString().Replace(\", , \", \", \").Replace(\", ,\", \", \").Replace(\" \", \" \");\n return prompt;\n }", "docstring": "/// ", "url": "https://github.com/Particle1904/DatasetHelpers/blob/f55d0567c227a6eec658f597384439056e47762f/SmartData.Lib/Services/PromptGeneratorService.cs#L39-L67", "sha": "f55d0567c227a6eec658f597384439056e47762f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TagProcessorService.ProcessAllTagFiles", "code": "public async Task ProcessAllTagFiles(string inputFolderPath, string tagsToAdd, string tagsToEmphasize, string tagsToRemove)\n {\n string[] files = Utilities.GetFilesByMultipleExtensions(inputFolderPath, _txtSearchPattern);\n CancellationToken cancellationToken = _cancellationTokenSource.Token;\n\n TotalFilesChanged?.Invoke(this, files.Length);\n\n foreach (string file in files)\n {\n cancellationToken.ThrowIfCancellationRequested();\n\n string readTags = await File.ReadAllTextAsync(file);\n string processedTags = ProcessListOfTags(readTags, tagsToAdd, tagsToEmphasize, tagsToRemove);\n await File.WriteAllTextAsync(file, processedTags);\n ProgressUpdated?.Invoke(this, EventArgs.Empty);\n }\n }", "docstring": "/// ", "url": "https://github.com/Particle1904/DatasetHelpers/blob/f55d0567c227a6eec658f597384439056e47762f/SmartData.Lib/Services/TagProcessorService.cs#L152-L168", "sha": "f55d0567c227a6eec658f597384439056e47762f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StartActivityService.Submit", "code": "public override async Task Submit(InstanceDataInput input)\n {\n\n InstanceEntity instance = null;\n\n WorkflowTemplateEntity tpl = null;\n ActivityModel currentActivity = null;\n var workItemId = 0l;\n var workItem = default(WorkItemEntity);\n if 
(input.Type == QueryType.WorkItem)\n {\n workItem = await _workItemRepository.GetAsync(input.WorkItemId);\n if (workItem == null)\n throw ResultOutput.Exception(\"该工作项不存在。\");\n else\n {\n if (workItem.ParticipantId != User.Id)\n {//后续考虑委托办理和管理员权限可以代为操作\n //throw ResultOutput.Exception(\"您不是该节点审批人,无权进行此操作!\");\n }\n if (workItem.State > ActivityState.UnRead)\n {\n throw ResultOutput.Exception(\"该工作项当前不可操作。\");\n }\n tpl = await _workflowTemplateRepository.GetAsync(workItem.WorkflowTemplateId);\n instance = await _instanceRepository.GetAsync(workItem.InstanceId);\n currentActivity = _activityService.GetActivity(tpl, workItem.ActivityId);\n\n if (!input.IsSaveOnly)\n ValidInput(input, tpl);\n\n workItemId=input.WorkItemId;\n\n }\n }\n else\n if (input.Type == QueryType.Template)\n {\n tpl = await _workflowTemplateRepository.GetAsync(input.TemplateId);\n\n if (input.InstanceId <= 0)\n {//如果还没有创建实例,先创建实例\n\n instance = new InstanceEntity();\n instance.FormModel = JsonConvert.SerializeObject(input.Form);\n\n instance.TemplateId = input.TemplateId;\n instance.GroupId = tpl.GroupId;\n instance.Name = $\"{User.Name}发起的『{tpl.Name}』流程\";\n instance.OUId = input.OUId;\n instance.InitiatorId = User.Id;\n if (!input.IsSaveOnly)\n ///仅保存的话不存储流程实例\n {\n instance.State = InstanceState.Running;\n var setting = await base.GetSetting();\n instance.ReferenceNo = AiliCould.Core.BPM.Helper.ReferenceNoHelper.GetNo(setting.ReferenceNoSetting, \"\");\n }\n else\n instance.State = InstanceState.UnInitiated;\n var res = await _instanceRepository.InsertAsync(instance);\n\n }\n currentActivity = _activityService.GetActivity(tpl, input.ActivityCode);\n {//沒有創建 workItem 則創建 workItem\n workItem = await AddCurrentWorkItem(instance, tpl, currentActivity,input.IsSaveOnly);\n //完成当前节点工作项目\n\n }\n workItemId = workItem.Id;\n }\n\n\n if (string.IsNullOrEmpty(instance.ReferenceNo) && !input.IsSaveOnly)\n {\n var setting = await base.GetSetting();\n \n instance.ReferenceNo = AiliCould.Core.BPM.Helper.ReferenceNoHelper.GetNo(setting.ReferenceNoSetting, \"\");\n instance.State = InstanceState.Running;\n }\n\n ///根据表单设置的权限更新表单\n var formModel = _activityService.UpdateForm(instance.FormModel, currentActivity.Permission, input.Form);\n\n instance.FormModel = JsonConvert.SerializeObject(formModel);\n ///更新表单信息\n await _instanceRepository.UpdateAsync(instance);\n if (input.IsSaveOnly)\n return null;\n else\n {\n await FinishWorkItem(workItemId, input.Comment, input.ApprovalResult);\n return new ActivityOutput\n {\n Instance = instance,\n Template = tpl,\n OptionalParticipants = input.OptionalParticipants,\n FormModel = formModel,\n CurrentActivity = currentActivity\n\n };\n\n }\n\n\n }", "docstring": "/// ", "url": "https://github.com/leooneone/aibpm.plus/blob/00b7e0f9274fbdeefd1686d65bd9ac414a1ebaf5/Modules/AI/AI.BPM/Services/BPM/Activity/Activities/StartActivityService.cs#L40-L145", "sha": "00b7e0f9274fbdeefd1686d65bd9ac414a1ebaf5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RoleRepository.GetChildIdListAsync", "code": "public async Task> GetChildIdListAsync(long id)\n {\n return await Select\n .Where(a => a.Id == id)\n .AsTreeCte()\n .ToListAsync(a => a.Id);\n }", "docstring": "/// ", "url": "https://github.com/leooneone/aibpm.plus/blob/00b7e0f9274fbdeefd1686d65bd9ac414a1ebaf5/src/platform/ZhonTai.Admin/Repositories/Role/RoleRepository.cs#L20-L26", "sha": "00b7e0f9274fbdeefd1686d65bd9ac414a1ebaf5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"UserOrgRepository.HasUser", "code": "public async Task HasUser(long id)\n {\n return await Select.Where(a => a.OrgId == id).AnyAsync();\n }", "docstring": "/// ", "url": "https://github.com/leooneone/aibpm.plus/blob/00b7e0f9274fbdeefd1686d65bd9ac414a1ebaf5/src/platform/ZhonTai.Admin/Repositories/UserOrg/UserOrgRepository.cs#L20-L23", "sha": "00b7e0f9274fbdeefd1686d65bd9ac414a1ebaf5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CacheService.GetList", "code": "public List GetList()\n {\n var list = new List();\n\n var appConfig = LazyGetRequiredService();\n Assembly[] assemblies = AssemblyHelper.GetAssemblyList(appConfig.AssemblyNames);\n\n foreach (Assembly assembly in assemblies)\n {\n var types = assembly.GetExportedTypes().Where(a => a.GetCustomAttribute(false) != null);\n foreach (Type type in types)\n {\n var fields = type.GetFields(BindingFlags.Public | BindingFlags.Static | BindingFlags.FlattenHierarchy);\n foreach (FieldInfo field in fields)\n {\n var descriptionAttribute = field.GetCustomAttributes(typeof(DescriptionAttribute), false).FirstOrDefault() as DescriptionAttribute;\n\n list.Add(new\n {\n field.Name,\n Value = field.GetRawConstantValue().ToString(),\n descriptionAttribute?.Description\n });\n }\n }\n }\n\n return list;\n }", "docstring": "/// ", "url": "https://github.com/leooneone/aibpm.plus/blob/00b7e0f9274fbdeefd1686d65bd9ac414a1ebaf5/src/platform/ZhonTai.Admin/Services/Cache/CacheService.cs#L32-L60", "sha": "00b7e0f9274fbdeefd1686d65bd9ac414a1ebaf5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ConfigHelper.Load", "code": "public static IConfiguration Load(string fileName, string environmentName = \"\", bool optional = true, bool reloadOnChange = false)\n {\n var filePath = Path.Combine(AppContext.BaseDirectory, \"Configs\");\n if (!Directory.Exists(filePath))\n return null;\n\n var builder = new ConfigurationBuilder()\n .SetBasePath(filePath)\n .AddJsonFile(fileName.ToLower() + \".json\", optional, reloadOnChange);\n\n if (environmentName.NotNull())\n {\n builder.AddJsonFile(fileName.ToLower() + \".\" + environmentName + \".json\", optional: optional, reloadOnChange: reloadOnChange);\n }\n\n return builder.Build();\n }", "docstring": "/* 使用热更新\n var uploadConfig = new ConfigHelper().Load(\"uploadconfig\", _env.EnvironmentName, true);\n services.Configure(uploadConfig);\n\n private readonly UploadConfig _uploadConfig;\n public ImgController(IOptionsMonitor uploadConfig)\n {\n _uploadConfig = uploadConfig.CurrentValue;\n }\n */", "url": "https://github.com/leooneone/aibpm.plus/blob/00b7e0f9274fbdeefd1686d65bd9ac414a1ebaf5/src/platform/ZhonTai.Common/Helpers/ConfigHelper.cs#L31-L47", "sha": "00b7e0f9274fbdeefd1686d65bd9ac414a1ebaf5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PasswordHelper.Verify", "code": "public static bool Verify(string input)\n {\n if (input.IsNull())\n {\n return false;\n }\n\n return RegexPassword().IsMatch(input);\n }", "docstring": "/// ", "url": "https://github.com/leooneone/aibpm.plus/blob/00b7e0f9274fbdeefd1686d65bd9ac414a1ebaf5/src/platform/ZhonTai.Common/Helpers/PasswordHelper.cs#L19-L27", "sha": "00b7e0f9274fbdeefd1686d65bd9ac414a1ebaf5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RazorSlice.Write", "code": "protected void Write(char? 
value)\n {\n if (value.HasValue)\n {\n WriteUtf8SpanFormattable(value.Value);\n }\n }", "docstring": "/// ", "url": "https://github.com/DamianEdwards/RazorSlices/blob/cf2e413551e49c3a3b40623ce020d678697f123a/src/RazorSlices/RazorSlice.Formattables.cs#L28-L34", "sha": "cf2e413551e49c3a3b40623ce020d678697f123a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ServiceInfo.IsServiceRunning", "code": "public static bool IsServiceRunning(string serviceName)\n {\n return IsServiceRunning(Environment.MachineName, serviceName);\n }", "docstring": "/// ", "url": "https://github.com/microsoft/p4vfs/blob/10d93293a4846b4fee3267f9a6da38f8fd5c5a66/source/P4VFS.Extensions/Source/Utilities/ServiceInfo.cs#L58-L61", "sha": "10d93293a4846b4fee3267f9a6da38f8fd5c5a66"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DiffView.ShowDiffViewAsync", "code": "public static async System.Threading.Tasks.Task ShowDiffViewAsync(string filePath, string originalCode, string optimizedCode)\n {\n string extension = System.IO.Path.GetExtension(filePath).TrimStart('.');\n\n string tempFolder = System.IO.Path.GetTempPath();\n string tempFilePath1 = System.IO.Path.Combine(tempFolder, $\"Original.{extension}\");\n string tempFilePath2 = System.IO.Path.Combine(tempFolder, $\"Optimized.{extension}\");\n\n System.IO.File.WriteAllText(tempFilePath1, originalCode);\n System.IO.File.WriteAllText(tempFilePath2, optimizedCode);\n\n DTE dte = await VS.GetServiceAsync();\n\n await ThreadHelper.JoinableTaskFactory.SwitchToMainThreadAsync();\n\n dte.ExecuteCommand(\"Tools.DiffFiles\", $\"\\\"{tempFilePath1}\\\" \\\"{tempFilePath2}\\\"\");\n }", "docstring": "/// ", "url": "https://github.com/jeffdapaz/VisualChatGPTStudio/blob/b393c71d8c8ddeb12d13c5a77e63caf39cd734ab/VisualChatGPTStudioShared/Utils/DiffView.cs#L17-L33", "sha": "b393c71d8c8ddeb12d13c5a77e63caf39cd734ab"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ResourceObjectUtils.AddItemAndRet", "code": "public static T AddItemAndRet(this ResourceFolder folder, T item) where T : BaseResource {\n folder.AddItem(item);\n return item;\n }", "docstring": "/// ", "url": "https://github.com/AngryCarrot789/FramePFX/blob/73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8/FramePFX/Editing/ResourceManaging/ResourceObjectUtils.cs#L29-L32", "sha": "73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ActivityTask.GetAwaiter", "code": "public new TaskAwaiter GetAwaiter() => this.Task.GetAwaiter();", "docstring": "/// ", "url": "https://github.com/AngryCarrot789/FramePFX/blob/73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8/FramePFX/Tasks/ActivityTask.cs#L168-L168", "sha": "73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ArrayUtils.Hash", "code": "public static int Hash(T[] array) {\n if (array == null)\n return 0;\n\n IEqualityComparer comparer = EqualityComparer.Default;\n int result = 1;\n foreach (T t in array) {\n result = 31 * result + comparer.GetHashCode(t);\n }\n\n return result;\n }", "docstring": "// Using IEqualityComparer + generic functions is easier than having", "url": "https://github.com/AngryCarrot789/FramePFX/blob/73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8/FramePFX/Utils/ArrayUtils.cs#L29-L40", "sha": "73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"TypeUtils.instanceof", "code": "public static bool instanceof(this Type left, Type right) {\n return right.IsAssignableFrom(left);\n }", "docstring": "/// ", "url": "https://github.com/AngryCarrot789/FramePFX/blob/73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8/FramePFX/Utils/TypeUtils.cs#L33-L35", "sha": "73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "InheritanceDictionary.GetEffectiveValue", "code": "public T? GetEffectiveValue(Type key) {\n return this.GetOrCreateEntryInternal(key).inheritedItem;\n }", "docstring": "/// ", "url": "https://github.com/AngryCarrot789/FramePFX/blob/73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8/FramePFX/Utils/Collections/InheritanceDictionary.cs#L96-L98", "sha": "73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ObservableItemProcessor.MakeSimple", "code": "public static ObservableItemProcessorSimple MakeSimple(IObservableList list, Action? onItemAdded, Action? onItemRemoved) {\n return new ObservableItemProcessorSimple(list, onItemAdded, onItemRemoved);\n }", "docstring": "/// ", "url": "https://github.com/AngryCarrot789/FramePFX/blob/73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8/FramePFX/Utils/Collections/Observable/ObservableItemProcessor.cs#L40-L42", "sha": "73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RateLimitedDispatchAction.InvokeAsync", "code": "public void InvokeAsync(T param) {\n _ = Interlocked.Exchange(ref this.currentValue, new ObjectWrapper(param));\n base.InvokeAsyncCore();\n }", "docstring": "// An object wrapper is required in order to permit InvokeAsync being called with a null value.", "url": "https://github.com/AngryCarrot789/FramePFX/blob/73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8/FramePFX/Utils/RDA/RateLimitedDispatchAction.cs#L321-L324", "sha": "73f9ddb9ddfa9a50755f78bbfcbc19ef45ce61b8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ImportExportService.ExportAsync", "code": "public async Task ExportAsync(object input, string fileName, bool isDynamicExcelColumn = true) where T : class\n {\n\n var path = ImportExportUtil.GetFileDir(ref fileName);\n\n fileName = CommonUtils.GetSingleId() + fileName;\n var filePath = Path.Combine(path, fileName);\n using (FileStream fs = new(filePath, FileMode.Create))\n {\n await fs.ExportExcel(input, isDynamicExcelColumn).ConfigureAwait(false);\n }\n var result = _fileService.GetFileStreamResult(filePath, fileName);\n return result;\n }", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.Admin.Application/Services/ImportExport/ImportExportService.cs#L38-L51", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SessionService.PageAsync", "code": "public async Task> PageAsync(QueryPageOptions option)\n {\n var ret = new QueryData()\n {\n IsSorted = option.SortOrder != SortOrder.Unset,\n IsFiltered = option.Filters.Count > 0,\n IsAdvanceSearch = option.AdvanceSearches.Count > 0 || option.CustomerSearches.Count > 0,\n IsSearch = option.Searches.Count > 0\n };\n var dataScope = await SysUserService.GetCurrentUserDataScopeAsync().ConfigureAwait(false);\n\n using var db = GetDB();\n var query = db.GetQuery(option)\n .WhereIF(dataScope != null && dataScope?.Count > 0, u => 
dataScope.Contains(u.OrgId))//在指定机构列表查询\n .WhereIF(dataScope?.Count == 0, u => u.CreateUserId == UserManager.UserId)\n .WhereIF(!option.SearchText.IsNullOrWhiteSpace(), a => a.Account.Contains(option.SearchText!));\n\n if (option.IsPage)\n {\n RefAsync totalCount = 0;\n\n var items = await query.ToPageListAsync(option.PageIndex, option.PageItems, totalCount).ConfigureAwait(false);\n\n var verificatInfoDicts = _verificatInfoService.GetListByUserIds(items.Select(a => a.Id).ToList()).GroupBy(a => a.UserId).ToDictionary(a => a.Key, a => a.ToList());\n\n var r = items.Select((it) =>\n {\n var reuslt = it.Adapt();\n if (verificatInfoDicts.TryGetValue(it.Id, out var verificatInfos))\n {\n SessionService.GetTokenInfos(verificatInfos);//获取剩余时间\n reuslt.VerificatCount = verificatInfos.Count;//令牌数量\n reuslt.VerificatSignList = verificatInfos;//令牌列表\n\n //如果有mqtt客户端ID就是在线\n reuslt.Online = verificatInfos.Any(it => it.ClientIds.Count > 0);\n }\n\n return reuslt;\n }).ToList();\n\n ret.TotalCount = totalCount;\n ret.Items = r;\n }\n else if (option.IsVirtualScroll)\n {\n RefAsync totalCount = 0;\n\n var items = await query.ToPageListAsync(option.StartIndex, option.PageItems, totalCount).ConfigureAwait(false);\n var verificatInfoDicts = _verificatInfoService.GetListByUserIds(items.Select(a => a.Id).ToList()).GroupBy(a => a.UserId).ToDictionary(a => a.Key, a => a.ToList());\n\n var r = items.Select((it) =>\n {\n var reuslt = it.Adapt();\n if (verificatInfoDicts.TryGetValue(it.Id, out var verificatInfos))\n {\n SessionService.GetTokenInfos(verificatInfos);//获取剩余时间\n reuslt.VerificatCount = verificatInfos.Count;//令牌数量\n reuslt.VerificatSignList = verificatInfos;//令牌列表\n\n //如果有mqtt客户端ID就是在线\n reuslt.Online = verificatInfos.Any(it => it.ClientIds.Count > 0);\n }\n\n return reuslt;\n }).ToList();\n ret.TotalCount = totalCount;\n ret.Items = r;\n }\n else\n {\n var items = await query.ToListAsync().ConfigureAwait(false);\n\n var verificatInfoDicts = _verificatInfoService.GetListByUserIds(items.Select(a => a.Id).ToList()).GroupBy(a => a.UserId).ToDictionary(a => a.Key, a => a.ToList());\n\n var r = items.Select((it) =>\n {\n var reuslt = it.Adapt();\n if (verificatInfoDicts.TryGetValue(it.Id, out var verificatInfos))\n {\n SessionService.GetTokenInfos(verificatInfos);//获取剩余时间\n reuslt.VerificatCount = verificatInfos.Count;//令牌数量\n reuslt.VerificatSignList = verificatInfos;//令牌列表\n\n //如果有mqtt客户端ID就是在线\n reuslt.Online = verificatInfos.Any(it => it.ClientIds.Count > 0);\n }\n\n return reuslt;\n }).ToList();\n ret.TotalCount = items.Count;\n ret.Items = r;\n }\n return ret;\n }", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.Admin.Application/Services/Session/SessionService.cs#L47-L141", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ConfigurableOptionsServiceCollectionExtensions.AddConfigurableOptions", "code": "public static IServiceCollection AddConfigurableOptions(this IServiceCollection services)\n where TOptions : class, IConfigurableOptions\n {\n var optionsType = typeof(TOptions);\n\n // 获取选项配置\n var (optionsSettings, path) = Penetrates.GetOptionsConfiguration(optionsType);\n\n // 配置选项(含验证信息)\n var configurationRoot = App.Configuration;\n var optionsConfiguration = configurationRoot.GetSection(path);\n\n // 配置选项监听\n if (typeof(IConfigurableOptionsListener).IsAssignableFrom(optionsType))\n {\n var onListenerMethod = 
optionsType.GetMethod(nameof(IConfigurableOptionsListener.OnListener));\n if (onListenerMethod != null)\n {\n // 监听全局配置改变,目前该方式存在触发两次的 bug:https://github.com/dotnet/aspnetcore/issues/2542\n ChangeToken.OnChange(() => configurationRoot.GetReloadToken(), ((Action)(() =>\n {\n var options = optionsConfiguration.Get();\n if (options != null) onListenerMethod.Invoke(options, new object[] { options, optionsConfiguration });\n })).Debounce());\n }\n }\n\n var optionsConfigure = services.AddOptions()\n .Bind(optionsConfiguration, options =>\n {\n options.BindNonPublicProperties = true; // 绑定私有变量\n })\n .ValidateDataAnnotations()\n .ValidateOnStart();\n\n // 实现 Key 映射\n services.PostConfigureAll(options =>\n {\n // 查找所有贴了 MapSettings 的键值对\n var remapKeys = optionsType.GetProperties(BindingFlags.Instance | BindingFlags.Public)\n .Where(u => u.IsDefined(typeof(MapSettingsAttribute), true));\n if (!remapKeys.Any()) return;\n\n foreach (var prop in remapKeys)\n {\n var propType = prop.PropertyType;\n var realKey = prop.GetCustomAttribute(true).Path;\n var realValue = configurationRoot.GetValue(propType, $\"{path}:{realKey}\");\n prop.SetValue(options, realValue);\n }\n });\n\n // 配置复杂验证后后期配置\n var validateInterface = optionsType.GetInterfaces()\n .FirstOrDefault(u => u.IsGenericType && typeof(IConfigurableOptions).IsAssignableFrom(u.GetGenericTypeDefinition()));\n if (validateInterface != null)\n {\n var genericArguments = validateInterface.GenericTypeArguments;\n\n // 配置复杂验证\n if (genericArguments.Length > 1)\n {\n services.TryAddEnumerable(ServiceDescriptor.Singleton(typeof(IValidateOptions), genericArguments.Last()));\n }\n\n // 配置后期配置\n var postConfigureMethod = optionsType.GetMethod(nameof(IConfigurableOptions.PostConfigure));\n if (postConfigureMethod != null)\n {\n if (optionsSettings?.PostConfigureAll != true)\n optionsConfigure.PostConfigure(options => postConfigureMethod.Invoke(options, new object[] { options, optionsConfiguration }));\n else\n services.PostConfigureAll(options => postConfigureMethod.Invoke(options, new object[] { options, optionsConfiguration }));\n }\n }\n\n return services;\n }", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.Furion/ConfigurableOptions/Extensions/ConfigurableOptionsServiceCollectionExtensions.cs#L37-L114", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MD5Encryption.Compare", "code": "public static bool Compare(string text, string hash, bool uppercase = false, bool is16 = false)\n {\n return Compare(Encoding.UTF8.GetBytes(text), hash, uppercase, is16);\n }", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.Furion/DataEncryption/Encryptions/MD5Encryption.cs#L31-L34", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Scoped.Create", "code": "public static void Create(Action handler, IServiceScopeFactory scopeFactory = default)\n {\n CreateAsync(async (fac, scope) =>\n {\n handler(fac, scope);\n await Task.CompletedTask.ConfigureAwait(false);\n }, scopeFactory).GetAwaiter().GetResult();\n }", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.Furion/DependencyInjection/Scoped.cs#L27-L34", "sha": 
"c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NewtonsoftJsonExtensions.AddDateTimeTypeConverters", "code": "public static IList AddDateTimeTypeConverters(this IList converters, string outputFormat = \"yyyy-MM-dd HH:mm:ss\", bool localized = false)\n {\n converters.Add(new NewtonsoftJsonDateTimeJsonConverter(outputFormat, localized));\n converters.Add(new NewtonsoftNullableJsonDateTimeJsonConverter(outputFormat, localized));\n\n converters.Add(new NewtonsoftJsonDateTimeOffsetJsonConverter(outputFormat, localized));\n converters.Add(new NewtonsoftJsonNullableDateTimeOffsetJsonConverter(outputFormat, localized));\n\n return converters;\n }", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.Furion/JsonSerialization/Extensions/NewtonsoftJsonExtensions.cs#L29-L38", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SpecificationDocumentServiceCollectionExtensions.AddSpecificationDocuments", "code": "public static IMvcBuilder AddSpecificationDocuments(this IMvcBuilder mvcBuilder, Action configure = default)\n {\n mvcBuilder.Services.AddSpecificationDocuments(configure);\n\n return mvcBuilder;\n }", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.Furion/SpecificationDocument/Extensions/SpecificationDocumentServiceCollectionExtensions.cs#L33-L38", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UnifyResultServiceCollectionExtensions.AddUnifyResult", "code": "public static IMvcBuilder AddUnifyResult(this IMvcBuilder mvcBuilder)\n {\n mvcBuilder.Services.AddUnifyResult();\n\n return mvcBuilder;\n }", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.Furion/UnifyResult/Extensions/UnifyResultServiceCollectionExtensions.cs#L32-L37", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NewObjectExtensions.GetAssembly", "code": "internal static Assembly? GetAssembly(this object? 
obj) => obj?.GetType().Assembly;", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.Furion/V5_Experience/Core/Extensions/ObjectExtensions.cs#L35-L35", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HttpMultipartFormDataBuilderDeclarativeExtractor.Extract", "code": "public void Extract(HttpRequestBuilder httpRequestBuilder, HttpDeclarativeExtractorContext context)\n {\n // 尝试解析单个 Action 类型参数\n if (context.Args.SingleOrDefault(u => u is Action) is not\n Action multipartFormDataBuilderAction)\n {\n return;\n }\n\n // 处理和 [Multipart] 特性冲突问题\n if (httpRequestBuilder.MultipartFormDataBuilder is not null)\n {\n multipartFormDataBuilderAction.Invoke(httpRequestBuilder.MultipartFormDataBuilder);\n }\n else\n {\n // 设置多部分表单内容\n httpRequestBuilder.SetMultipartContent(multipartFormDataBuilderAction);\n }\n }", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.Furion/V5_Experience/HttpRemote/Declarative/Extractors/HttpMultipartFormDataBuilderDeclarativeExtractor.cs#L20-L39", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FileUtil.ReadFile", "code": "public static string ReadFile(string Path, Encoding? encoding = default)\n {\n encoding ??= Encoding.UTF8;\n if (!File.Exists(Path))\n {\n return null;\n }\n\n StreamReader streamReader = new StreamReader(Path, encoding);\n string result = streamReader.ReadToEnd();\n streamReader.Close();\n streamReader.Dispose();\n return result;\n }", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.NewLife.X/Common/FileUtil.cs#L23-L36", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "InIConfigProvider.Init", "code": "public override void Init(String value)\n {\n // 加上默认后缀\n if (!value.IsNullOrEmpty() && Path.GetExtension(value).IsNullOrEmpty()) value += \".ini\";\n\n base.Init(value);\n }", "docstring": "/// 初始化", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.NewLife.X/Configuration/IniConfigProvider.cs#L13-L19", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MD5PasswordProvider.Hash", "code": "public String Hash(String password) => password.MD5();", "docstring": "/// 对密码进行散列处理,此处可以加盐,结果保存在数据库", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Admin/ThingsGateway.NewLife.X/Security/IPasswordProvider.cs#L41-L41", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "VariableObjectSourceGenerator.Initialize", "code": "public void Initialize(GeneratorInitializationContext context)\n {\n //Debugger.Launch();\n context.RegisterForPostInitialization(a =>\n {\n a.AddSource(nameof(m_generatorVariableAttribute), m_generatorVariableAttribute);\n });\n context.RegisterForSyntaxNotifications(() => new VariableSyntaxReceiver());\n }", "docstring": "/// ", "url": 
"https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Foundation/ThingsGateway.Foundation.SourceGenerator/SourceGenerator/VariableObjectSourceGenerator.cs#L53-L61", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RuntimeInfoController.GetChannelListAsync", "code": "[HttpGet(\"channelList\")]\n [DisplayName(\"获取通道信息\")]\n public async Task> GetChannelListAsync(ChannelPageInput input)\n {\n\n var channelRuntimes = await GlobalData.GetCurrentUserChannels().ConfigureAwait(false);\n\n var data = channelRuntimes\n .Select(a => a.Value)\n .WhereIF(!string.IsNullOrEmpty(input.Name), u => u.Name.Contains(input.Name))\n .WhereIF(!string.IsNullOrEmpty(input.PluginName), u => u.PluginName == input.PluginName)\n .WhereIF(input.PluginType != null, u => u.PluginType == input.PluginType)\n .ToPagedList(input);\n return data;\n }", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Gateway/ThingsGateway.Gateway.Application/Controller/RuntimeInfoController.cs#L37-L51", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UpdateZipFileInfo.OnInitialized", "code": "protected override void OnInitialized()\n {\n base.OnInitialized();\n HeaderText = ManagementLocalizer[nameof(HeaderText)];\n }", "docstring": "/// ", "url": "https://github.com/ThingsGateway/ThingsGateway/blob/c355968addb85577c008fc6ea9ba5b58e08ce784/src/Gateway/ThingsGateway.Management/Update/Update/UpdateZipFileInfo.razor.cs#L42-L46", "sha": "c355968addb85577c008fc6ea9ba5b58e08ce784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UpdateModel.Deserialize", "code": "public static UpdateModel Deserialize(string json)\n {\n var obj = JsonSerializer.Deserialize(json, UpdateModelJsonContext.Default.UpdateModel);\n\n return obj;\n }", "docstring": "/// ", "url": "https://github.com/d2phap/ExifGlass/blob/ca73fd2105de29b7f6bec9f82f715b51cdbebc82/Source/ExifGlass.Core/Settings/UpdateModel.cs#L48-L53", "sha": "ca73fd2105de29b7f6bec9f82f715b51cdbebc82"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SteamServerStats.RequestUserStatsAsync", "code": "public static async Task RequestUserStatsAsync(SteamId steamid)\n {\n var r = await Internal.RequestUserStats(steamid);\n if (!r.HasValue) return Result.Fail;\n return r.Value.Result;\n }", "docstring": "/// ", "url": "https://github.com/Lakatrazz/BONELAB-Fusion/blob/8889d7008e90c36cdcb8ba6bdf15a499d8f17ebb/LabFusion/dependencies/Facepunch.Steamworks/SteamServerStats.cs#L18-L23", "sha": "8889d7008e90c36cdcb8ba6bdf15a499d8f17ebb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ImageLoader.DiskCacheContains", "code": "public static bool DiskCacheContains(string url) => File.Exists(DiskCachePath(url));", "docstring": "/// ", "url": "https://github.com/IvanMurzak/Unity-ImageLoader/blob/8f9ebd0e03a8d465adfc659b9124a504fba3c177/Assets/_PackageRoot/Runtime/ImageLoader.DiskCache.cs#L52-L52", "sha": "8f9ebd0e03a8d465adfc659b9124a504fba3c177"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ServerClientAttributeProcessor.InjectGuardParameters", "code": "static void InjectGuardParameters(MethodDefinition md, ILProcessor worker, Instruction top)\n {\n int offset = md.Resolve().IsStatic ? 
0 : 1;\n for (int index = 0; index < md.Parameters.Count; index++)\n {\n ParameterDefinition param = md.Parameters[index];\n if (param.IsOut)\n {\n TypeReference elementType = param.ParameterType.GetElementType();\n\n md.Body.Variables.Add(new VariableDefinition(elementType));\n md.Body.InitLocals = true;\n\n worker.InsertBefore(top, worker.Create(OpCodes.Ldarg, index + offset));\n worker.InsertBefore(top, worker.Create(OpCodes.Ldloca_S, (byte)(md.Body.Variables.Count - 1)));\n worker.InsertBefore(top, worker.Create(OpCodes.Initobj, elementType));\n worker.InsertBefore(top, worker.Create(OpCodes.Ldloc, md.Body.Variables.Count - 1));\n worker.InsertBefore(top, worker.Create(OpCodes.Stobj, elementType));\n }\n }\n }", "docstring": "// this is required to early-out from a function with \"ref\" or \"out\" parameters", "url": "https://github.com/smartgrass/XCSkillEditor_Unity/blob/a1ea899b4504eff4ab64de33abf98681f4409f82/Assets/Mirror/Editor/Weaver/Processors/ServerClientAttributeProcessor.cs#L117-L137", "sha": "a1ea899b4504eff4ab64de33abf98681f4409f82"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NetworkRoomManagerExt.OnRoomServerSceneChanged", "code": "public override void OnRoomServerSceneChanged(string sceneName)\n {\n // spawn the initial batch of Rewards\n if (sceneName == GameplayScene)\n {\n Spawner.InitialSpawn();\n }\n }", "docstring": "/// ", "url": "https://github.com/smartgrass/XCSkillEditor_Unity/blob/a1ea899b4504eff4ab64de33abf98681f4409f82/Assets/Mirror/Examples/Room/Scripts/NetworkRoomManagerExt.cs#L16-L23", "sha": "a1ea899b4504eff4ab64de33abf98681f4409f82"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BiquadFilterNode.CreateAsync", "code": "public static new async Task CreateAsync(IJSRuntime jSRuntime, IJSObjectReference jSReference)\n {\n return await CreateAsync(jSRuntime, jSReference, new());\n }", "docstring": "/// ", "url": "https://github.com/KristofferStrube/Blazor.WebAudio/blob/ea1acdd530ee4dc07e3efda4bc0712447b0af18a/src/KristofferStrube.Blazor.WebAudio/AudioNodes/BiquadFilterNode.cs#L21-L24", "sha": "ea1acdd530ee4dc07e3efda4bc0712447b0af18a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ChannelSplitterNode.CreateAsync", "code": "public static new async Task CreateAsync(IJSRuntime jSRuntime, IJSObjectReference jSReference)\n {\n return await CreateAsync(jSRuntime, jSReference, new());\n }", "docstring": "/// ", "url": "https://github.com/KristofferStrube/Blazor.WebAudio/blob/ea1acdd530ee4dc07e3efda4bc0712447b0af18a/src/KristofferStrube.Blazor.WebAudio/AudioNodes/ChannelSplitterNode.cs#L22-L25", "sha": "ea1acdd530ee4dc07e3efda4bc0712447b0af18a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AudioParamMap.CreateAsync", "code": "public static async Task CreateAsync(IJSRuntime jSRuntime, IJSObjectReference jSReference)\n {\n return await CreateAsync(jSRuntime, jSReference, new());\n }", "docstring": "/// ", "url": "https://github.com/KristofferStrube/Blazor.WebAudio/blob/ea1acdd530ee4dc07e3efda4bc0712447b0af18a/src/KristofferStrube.Blazor.WebAudio/AudioWorklet/AudioParamMap.cs#L14-L17", "sha": "ea1acdd530ee4dc07e3efda4bc0712447b0af18a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HashingHelper.CreatePasswordHash", "code": "public static void CreatePasswordHash(string password, out byte[] passwordHash, out byte[] passwordSalt)\n {\n using HMACSHA512 hmac = new();\n\n passwordSalt 
= hmac.Key;\n passwordHash = hmac.ComputeHash(Encoding.UTF8.GetBytes(password));\n }", "docstring": "/// ", "url": "https://github.com/kodlamaio-projects/nArchitecture.Core/blob/9561d89f545169c3c2078c229c3a62dddf2886ad/src/Core.Security/Hashing/HashingHelper.cs#L11-L17", "sha": "9561d89f545169c3c2078c229c3a62dddf2886ad"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TestExplicit.GetEnumerator", "code": "public IEnumerator GetEnumerator() => new TestEnumerator(_list);", "docstring": "//IEnumerator> IEnumerable>.GetEnumerator()", "url": "https://github.com/MoaidHathot/Dumpify/blob/78ef11df642fac68e6676c08c322a79ce30e25f9/src/Dumpify.Playground/Program.cs#L717-L717", "sha": "78ef11df642fac68e6676c08c322a79ce30e25f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WorldNormalInputsNode.DrawProperties", "code": "public override void DrawProperties()\n\t\t{\n\t\t\tbase.DrawProperties();\n\t\t\tm_perPixel = EditorGUILayoutToggleLeft( PerPixelLabelStr, m_perPixel );\n\t\t}", "docstring": "//public override void Destroy()", "url": "https://github.com/falseeeeeeeeee/ShaderLibrary/blob/5d3931c2fd7c8478c984bb2f0be2d0875d6352d2/ShaderLib_2021/Assets/Plugins/AmplifyShaderEditor/Plugins/Editor/Nodes/SurfaceShaderInputs/WorldNormalInputsNode.cs#L45-L49", "sha": "5d3931c2fd7c8478c984bb2f0be2d0875d6352d2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CreateCustomShader.CreateShaderTemplate", "code": "private static void CreateShaderTemplate(string templatePath, string defaultFileName)\n {\n ProjectWindowUtil.StartNameEditingIfProjectWindowExists(0,\n ScriptableObject.CreateInstance(),\n GetSelectedPathOrFallback() + \"/\" + defaultFileName,\n null,\n templatePath);\n }", "docstring": "// 通用方法,用于从给定的模板路径创建Shader", "url": "https://github.com/falseeeeeeeeee/ShaderLibrary/blob/5d3931c2fd7c8478c984bb2f0be2d0875d6352d2/ShaderLib_2022/Assets/Plugins/CreateCustomShader/Editor/CreateCustomShader.cs#L11-L18", "sha": "5d3931c2fd7c8478c984bb2f0be2d0875d6352d2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MainDrawer.GetPropertyHeight", "code": "public override float GetPropertyHeight(MaterialProperty prop, string label, MaterialEditor editor)\n\t\t{\n\t\t\treturn _height;\n\t\t}", "docstring": "// Call in custom shader gui", "url": "https://github.com/falseeeeeeeeee/ShaderLibrary/blob/5d3931c2fd7c8478c984bb2f0be2d0875d6352d2/ShaderLib_2022/Assets/Plugins/LightWeightShaderGUI/Editor/ShaderDrawer.cs#L91-L94", "sha": "5d3931c2fd7c8478c984bb2f0be2d0875d6352d2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FBO.Create", "code": "public static FBO Create()\n {\n FBO fbo = new FBO();\n fbo.m_width = 0;\n fbo.m_height = 0;\n return fbo;\n }", "docstring": "// Create a dummy FBO", "url": "https://github.com/WeakKnight/GDC23_PracticalMobileRendering/blob/cb8f6f4e6933abe6a3d1b72c821864f239b14338/PracticalMobileRendering/Assets/Scripts/RenderPipeline/Runtime/LowLevelGraphicsAPI/FBO.cs#L149-L155", "sha": "cb8f6f4e6933abe6a3d1b72c821864f239b14338"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AIDecisionTree.Think", "code": "public static void Think(AIKnowledge knowledge, AIDecision decision)\n\t\t{\n\t\t\tvar conf = ConfigAIDecisionTreeCategory.Instance.Get(knowledge.DecisionArchetype);\n\t\t\tif (conf != null)\n\t\t\t{\n\t\t\t\tif (knowledge.CombatComponent != null && 
knowledge.CombatComponent.IsInCombat)\n\t\t\t\t{\n\t\t\t\t\tif (conf.CombatNode != null)\n\t\t\t\t\t\tHandler(knowledge, decision, conf.CombatNode);\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t{\n\t\t\t\t\tif (conf.Node != null)\n\t\t\t\t\t\tHandler(knowledge, decision, conf.Node);\n\t\t\t\t}\n\t\t\t}\n\t\t}", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Assets/Scripts/Code/Game/Component/AI/Decision/AIDecisionTree.cs#L13-L29", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ResourcesManager.IsProcessRunning", "code": "public bool IsProcessRunning()\n {\n return this.loadingOp.Count > 0;\n }", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Assets/Scripts/Code/Module/Resource/ResourcesManager.cs#L53-L56", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Writer.WriteCommonVal", "code": "[MethodImpl(MethodImplOptions.AggressiveInlining)]\n public void WriteCommonVal(T val) =>\n Serializer.Serialize(typeof(T), val, this, option, false);", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Assets/Scripts/ThirdParty/Nino/Serialization/Writer.Generic.cs#L16-L18", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StringMgr.Split", "code": "public static unsafe string[] Split(this ReadOnlySpan str, char separator)\n {\n if (str.IsEmpty)\n {\n return Array.Empty();\n }\n\n var indexes = ObjectPool>.Request();\n var index = 0;\n int i = 0;\n int max = str.Length;\n fixed (char* ptr = &str.GetPinnableReference())\n {\n var cPtr = ptr;\n while (i < max)\n {\n if (*cPtr++ == separator)\n {\n indexes[index++] = i;\n }\n\n i++;\n }\n \n string[] ret = new string[index + 1];\n var retSpan = ret.AsSpan();\n \n int start = 0;\n \n for (i = 0; i < index; i++)\n {\n ref int end = ref indexes.Data[i];\n if(start >= max || start == end)\n {\n retSpan[i] = string.Empty;\n }\n else\n {\n retSpan[i] = new string(ptr, start, end - start);\n }\n start = end + 1;\n }\n\n if (start < max)\n {\n retSpan[index] = new string(ptr, start, max - start);\n }\n else\n {\n retSpan[index] = string.Empty;\n }\n \n ObjectPool>.Return(indexes);\n return ret;\n }\n }", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Assets/Scripts/ThirdParty/Nino/Shared/Mgr/StringMgr.cs#L15-L70", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TaskCopyBuildinFiles.CopyBuildinFilesToStreaming", "code": "private void CopyBuildinFilesToStreaming(BuildParametersContext buildParametersContext, ManifestContext manifestContext)\n\t\t{\n\t\t\tECopyBuildinFileOption option = buildParametersContext.Parameters.CopyBuildinFileOption;\n\t\t\tstring packageOutputDirectory = buildParametersContext.GetPackageOutputDirectory();\n\t\t\tstring streamingAssetsDirectory = buildParametersContext.GetStreamingAssetsDirectory();\n\t\t\tstring buildPackageName = buildParametersContext.Parameters.PackageName;\n\t\t\tstring buildPackageVersion = buildParametersContext.Parameters.PackageVersion;\n\n\t\t\t// 加载补丁清单\n\t\t\tPackageManifest manifest = 
manifestContext.Manifest;\n\n\t\t\t// 清空流目录\n\t\t\tif (option == ECopyBuildinFileOption.ClearAndCopyAll || option == ECopyBuildinFileOption.ClearAndCopyByTags)\n\t\t\t{\n\t\t\t\tEditorTools.ClearFolder(streamingAssetsDirectory);\n\t\t\t}\n\t\t\t\n\t\t\tbool copy = false;\n\t\t\t// 拷贝文件列表(所有文件)\n\t\t\tif (option == ECopyBuildinFileOption.ClearAndCopyAll || option == ECopyBuildinFileOption.OnlyCopyAll)\n\t\t\t{\n\t\t\t\tcopy = true;\n\t\t\t\tforeach (var packageBundle in manifest.BundleList)\n\t\t\t\t{\n\t\t\t\t\tstring sourcePath = $\"{packageOutputDirectory}/{packageBundle.FileName}\";\n\t\t\t\t\tstring destPath = $\"{streamingAssetsDirectory}/{packageBundle.FileName}\";\n\t\t\t\t\tEditorTools.CopyFile(sourcePath, destPath, true);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// 拷贝文件列表(带标签的文件)\n\t\t\tif (option == ECopyBuildinFileOption.ClearAndCopyByTags || option == ECopyBuildinFileOption.OnlyCopyByTags)\n\t\t\t{\n\t\t\t\tstring[] tags = buildParametersContext.Parameters.CopyBuildinFileTags.Split(';');\n\t\t\t\tforeach (var packageBundle in manifest.BundleList)\n\t\t\t\t{\n\t\t\t\t\tif (packageBundle.HasTag(tags) == false)\n\t\t\t\t\t\tcontinue;\n\t\t\t\t\tcopy = true;\n\t\t\t\t\tstring sourcePath = $\"{packageOutputDirectory}/{packageBundle.FileName}\";\n\t\t\t\t\tstring destPath = $\"{streamingAssetsDirectory}/{packageBundle.FileName}\";\n\t\t\t\t\tEditorTools.CopyFile(sourcePath, destPath, true);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (copy)\n\t\t\t{\n\t\t\t\t// 拷贝补丁清单文件\n\t\t\t\t{\n\t\t\t\t\tstring fileName = YooAssetSettingsData.GetManifestBinaryFileName(buildPackageName, buildPackageVersion);\n\t\t\t\t\tstring sourcePath = $\"{packageOutputDirectory}/{fileName}\";\n\t\t\t\t\tstring destPath = $\"{streamingAssetsDirectory}/{fileName}\";\n\t\t\t\t\tEditorTools.CopyFile(sourcePath, destPath, true);\n\t\t\t\t}\n\n\t\t\t\t// 拷贝补丁清单哈希文件\n\t\t\t\t{\n\t\t\t\t\tstring fileName = YooAssetSettingsData.GetPackageHashFileName(buildPackageName, buildPackageVersion);\n\t\t\t\t\tstring sourcePath = $\"{packageOutputDirectory}/{fileName}\";\n\t\t\t\t\tstring destPath = $\"{streamingAssetsDirectory}/{fileName}\";\n\t\t\t\t\tEditorTools.CopyFile(sourcePath, destPath, true);\n\t\t\t\t}\n\n\t\t\t\t// 拷贝补丁清单版本文件\n\t\t\t\t{\n\t\t\t\t\tstring fileName = YooAssetSettingsData.GetPackageVersionFileName(buildPackageName);\n\t\t\t\t\tstring sourcePath = $\"{packageOutputDirectory}/{fileName}\";\n\t\t\t\t\tstring destPath = $\"{streamingAssetsDirectory}/{fileName}\";\n\t\t\t\t\tEditorTools.CopyFile(sourcePath, destPath, true);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// 刷新目录\n\t\t\tAssetDatabase.Refresh();\n\t\t\tBuildLogger.Log($\"内置文件拷贝完成:{streamingAssetsDirectory}\");\n\t\t}", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Modules/com.tuyoogame.yooasset/Editor/AssetBundleBuilder/BuildTasks/TaskCopyBuildinFiles.cs#L29-L104", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RawBundleWebLoader.Update", "code": "public override void Update()\n\t\t{\n\t\t\tif (_steps == ESteps.Done)\n\t\t\t\treturn;\n\n\t\t\tif (_steps == ESteps.None)\n\t\t\t{\n\t\t\t\tif (MainBundleInfo.LoadMode == BundleInfo.ELoadMode.LoadFromRemote)\n\t\t\t\t{\n\t\t\t\t\t_steps = ESteps.Download;\n\t\t\t\t\tFileLoadPath = MainBundleInfo.Bundle.CachedDataFilePath;\n\t\t\t\t}\n\t\t\t\telse if (MainBundleInfo.LoadMode == BundleInfo.ELoadMode.LoadFromStreaming)\n\t\t\t\t{\n\t\t\t\t\t_steps = 
ESteps.Website;\n\t\t\t\t\tFileLoadPath = MainBundleInfo.Bundle.CachedDataFilePath;\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t{\n\t\t\t\t\tthrow new System.NotImplementedException(MainBundleInfo.LoadMode.ToString());\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// 1. 下载远端文件\n\t\t\tif (_steps == ESteps.Download)\n\t\t\t{\n\t\t\t\tint failedTryAgain = Impl.DownloadFailedTryAgain;\n\t\t\t\t_downloader = DownloadSystem.CreateDownload(MainBundleInfo, failedTryAgain);\n\t\t\t\t_downloader.SendRequest();\n\t\t\t\t_steps = ESteps.CheckDownload;\n\t\t\t}\n\n\t\t\t// 2. 检测下载结果\n\t\t\tif (_steps == ESteps.CheckDownload)\n\t\t\t{\n\t\t\t\tDownloadProgress = _downloader.DownloadProgress;\n\t\t\t\tDownloadedBytes = _downloader.DownloadedBytes;\n\t\t\t\tif (_downloader.IsDone() == false)\n\t\t\t\t\treturn;\n\n\t\t\t\tif (_downloader.HasError())\n\t\t\t\t{\n\t\t\t\t\t_steps = ESteps.Done;\n\t\t\t\t\tStatus = EStatus.Failed;\n\t\t\t\t\tLastError = _downloader.GetLastError();\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t{\n\t\t\t\t\t_steps = ESteps.CheckFile;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// 3. 从站点下载\n\t\t\tif (_steps == ESteps.Website)\n\t\t\t{\n\t\t\t\tint failedTryAgain = Impl.DownloadFailedTryAgain;\n\t\t\t\tvar bundleInfo = ManifestTools.ConvertToUnpackInfo(MainBundleInfo.Bundle);\n\t\t\t\t_website = DownloadSystem.CreateDownload(bundleInfo, failedTryAgain);\n\t\t\t\t_website.SendRequest();\n\t\t\t\t_steps = ESteps.CheckWebsite;\n\t\t\t}\n\n\t\t\t// 4. 检测站点下载\n\t\t\tif (_steps == ESteps.CheckWebsite)\n\t\t\t{\n\t\t\t\tDownloadProgress = _website.DownloadProgress;\n\t\t\t\tDownloadedBytes = _website.DownloadedBytes;\n\t\t\t\tif (_website.IsDone() == false)\n\t\t\t\t\treturn;\n\n\t\t\t\tif (_website.HasError())\n\t\t\t\t{\n\t\t\t\t\t_steps = ESteps.Done;\n\t\t\t\t\tStatus = EStatus.Failed;\n\t\t\t\t\tLastError = _website.GetLastError();\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t{\n\t\t\t\t\t_steps = ESteps.CheckFile;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// 5. 
检测结果\n\t\t\tif (_steps == ESteps.CheckFile)\n\t\t\t{\n\t\t\t\t// 设置下载进度\n\t\t\t\tDownloadProgress = 1f;\n\t\t\t\tDownloadedBytes = (ulong)MainBundleInfo.Bundle.FileSize;\n\n\t\t\t\t_steps = ESteps.Done;\n\t\t\t\tif (File.Exists(FileLoadPath))\n\t\t\t\t{\n\t\t\t\t\tStatus = EStatus.Succeed;\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t{\n\t\t\t\t\tStatus = EStatus.Failed;\n\t\t\t\t\tLastError = $\"Raw file not found : {FileLoadPath}\";\n\t\t\t\t}\n\t\t\t}\n\t\t}", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Modules/com.tuyoogame.yooasset/Runtime/AssetSystem/Loader/RawBundleWebLoader.cs#L33-L133", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GameManager.OnHandleEventMessage", "code": "private void OnHandleEventMessage(IEventMessage message)\n\t{\n\t\tif(message is SceneEventDefine.ChangeToHomeScene)\n\t\t{\n\t\t\t_machine.ChangeState();\n\t\t}\n\t\telse if(message is SceneEventDefine.ChangeToBattleScene)\n\t\t{\n\t\t\t_machine.ChangeState();\n\t\t}\n\t}", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Modules/com.tuyoogame.yooasset/Samples~/Space Shooter/GameScript/Runtime/GameLogic/GameManager.cs#L53-L63", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StreamingAssetsHelper.Init", "code": "public static void Init()\n\t{\n\t\tif (_isInit == false)\n\t\t{\n\t\t\t_isInit = true;\n\t\t\tvar manifest = Resources.Load(\"BuildinFileManifest\");\n\t\t\tif (manifest != null)\n\t\t\t{\n\t\t\t\tforeach (string fileName in manifest.BuildinFiles)\n\t\t\t\t{\n\t\t\t\t\t_cacheData.Add(fileName);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Modules/com.tuyoogame.yooasset/Samples~/Space Shooter/ThirdParty/StreamingAssetsHelper/StreamingAssetsHelper.cs#L43-L57", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Double3Drawer.PopulateGenericMenu", "code": "public void PopulateGenericMenu(InspectorProperty property, GenericMenu genericMenu)\n {\n double3 value = (double3)property.ValueEntry.WeakSmartValue;\n var vec = new Vector3((float)value.x, (float)value.y, (float)value.z);\n\n if (genericMenu.GetItemCount() > 0)\n {\n genericMenu.AddSeparator(\"\");\n }\n genericMenu.AddItem(new GUIContent(\"Normalize\"), Mathf.Approximately(vec.magnitude, 1f), () => NormalizeEntries(property));\n genericMenu.AddItem(new GUIContent(\"Zero\", \"Set the vector to (0, 0, 0)\"), vec == Vector3.zero, () => SetVector(property, Vector3.zero));\n genericMenu.AddItem(new GUIContent(\"One\", \"Set the vector to (1, 1, 1)\"), vec == Vector3.one, () => SetVector(property, Vector3.one));\n genericMenu.AddSeparator(\"\");\n genericMenu.AddItem(new GUIContent(\"Right\", \"Set the vector to (1, 0, 0)\"), vec == Vector3.right, () => SetVector(property, Vector3.right));\n genericMenu.AddItem(new GUIContent(\"Left\", \"Set the vector to (-1, 0, 0)\"), vec == Vector3.left, () => SetVector(property, Vector3.left));\n genericMenu.AddItem(new GUIContent(\"Up\", \"Set the vector to (0, 1, 0)\"), vec == Vector3.up, () => SetVector(property, Vector3.up));\n genericMenu.AddItem(new GUIContent(\"Down\", \"Set the vector to (0, -1, 0)\"), vec == 
Vector3.down, () => SetVector(property, Vector3.down));\n genericMenu.AddItem(new GUIContent(\"Forward\", \"Set the vector property to (0, 0, 1)\"), vec == Vector3.forward, () => SetVector(property, Vector3.forward));\n genericMenu.AddItem(new GUIContent(\"Back\", \"Set the vector property to (0, 0, -1)\"), vec == Vector3.back, () => SetVector(property, Vector3.back));\n }", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Packages/com.thridparty.odin/Sirenix/Odin Inspector/Modules/Unity.Mathematics/MathematicsDrawers.cs#L598-L617", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TMP_SpriteAssetEditor.DisplayPageNavigation", "code": "void DisplayPageNavigation(ref int currentPage, int arraySize, int itemsPerPage)\n {\n Rect pagePos = EditorGUILayout.GetControlRect(false, 20);\n pagePos.width /= 3;\n\n int shiftMultiplier = Event.current.shift ? 10 : 1; // Page + Shift goes 10 page forward\n\n // Previous Page\n GUI.enabled = currentPage > 0;\n\n if (GUI.Button(pagePos, \"Previous Page\"))\n {\n currentPage -= 1 * shiftMultiplier;\n //m_isNewPage = true;\n }\n\n // Page Counter\n GUI.enabled = true;\n pagePos.x += pagePos.width;\n int totalPages = (int)(arraySize / (float)itemsPerPage + 0.999f);\n GUI.Label(pagePos, \"Page \" + (currentPage + 1) + \" / \" + totalPages, TMP_UIStyleManager.centeredLabel);\n\n // Next Page\n pagePos.x += pagePos.width;\n GUI.enabled = itemsPerPage * (currentPage + 1) < arraySize;\n\n if (GUI.Button(pagePos, \"Next Page\"))\n {\n currentPage += 1 * shiftMultiplier;\n //m_isNewPage = true;\n }\n\n // Clamp page range\n currentPage = Mathf.Clamp(currentPage, 0, arraySize / itemsPerPage);\n\n GUI.enabled = true;\n }", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Packages/com.unity.textmeshpro/Scripts/Editor/TMP_SpriteAssetEditor.cs#L672-L708", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TMP_MaterialManager.GetStencilMaterial", "code": "public static Material GetStencilMaterial(Material baseMaterial, int stencilID)\n {\n // Check if Material supports masking\n if (!baseMaterial.HasProperty(ShaderUtilities.ID_StencilID))\n {\n Debug.LogWarning(\"Selected Shader does not support Stencil Masking. Please select the Distance Field or Mobile Distance Field Shader.\");\n return baseMaterial;\n }\n\n int baseMaterialID = baseMaterial.GetInstanceID();\n\n // If baseMaterial already has a corresponding masking material, return it.\n for (int i = 0; i < m_materialList.Count; i++)\n {\n if (m_materialList[i].baseMaterial.GetInstanceID() == baseMaterialID && m_materialList[i].stencilID == stencilID)\n {\n m_materialList[i].count += 1;\n\n #if TMP_DEBUG_MODE\n ListMaterials();\n #endif\n\n return m_materialList[i].stencilMaterial;\n }\n }\n\n // No matching masking material found. 
Create and return a new one.\n\n Material stencilMaterial;\n\n //Create new Masking Material Instance for this Base Material\n stencilMaterial = new Material(baseMaterial);\n stencilMaterial.hideFlags = HideFlags.HideAndDontSave;\n\n #if UNITY_EDITOR\n stencilMaterial.name += \" Masking ID:\" + stencilID;\n #endif\n\n stencilMaterial.shaderKeywords = baseMaterial.shaderKeywords;\n\n // Set Stencil Properties\n ShaderUtilities.GetShaderPropertyIDs();\n stencilMaterial.SetFloat(ShaderUtilities.ID_StencilID, stencilID);\n //stencilMaterial.SetFloat(ShaderUtilities.ID_StencilOp, 0);\n stencilMaterial.SetFloat(ShaderUtilities.ID_StencilComp, 4);\n //stencilMaterial.SetFloat(ShaderUtilities.ID_StencilReadMask, stencilID);\n //stencilMaterial.SetFloat(ShaderUtilities.ID_StencilWriteMask, 0);\n\n MaskingMaterial temp = new MaskingMaterial();\n temp.baseMaterial = baseMaterial;\n temp.stencilMaterial = stencilMaterial;\n temp.stencilID = stencilID;\n temp.count = 1;\n\n m_materialList.Add(temp);\n\n #if TMP_DEBUG_MODE\n ListMaterials();\n #endif\n\n return stencilMaterial;\n }", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Packages/com.unity.textmeshpro/Scripts/Runtime/TMP_MaterialManager.cs#L43-L104", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TimelineAction.Execute", "code": "public abstract bool Execute(ActionContext context);", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Packages/com.unity.timeline/Editor/Actions/TimelineAction.cs#L25-L25", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TrackAction.Execute", "code": "public abstract bool Execute(IEnumerable tracks);", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Packages/com.unity.timeline/Editor/Actions/TrackAction.cs#L27-L27", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AnimationPlayableAssetEditor.GetClipOptions", "code": "public override ClipDrawOptions GetClipOptions(TimelineClip clip)\n {\n var clipOptions = base.GetClipOptions(clip);\n var asset = clip.asset as AnimationPlayableAsset;\n\n if (asset != null)\n clipOptions.errorText = GetErrorText(asset, clip.GetParentTrack() as AnimationTrack, clipOptions.errorText);\n\n if (clip.recordable)\n clipOptions.highlightColor = DirectorStyles.Instance.customSkin.colorAnimationRecorded;\n\n return clipOptions;\n }", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Packages/com.unity.timeline/Editor/Animation/AnimationPlayableAssetEditor.cs#L17-L29", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TrackAsset.OnBeforeTrackSerialize", "code": "protected virtual void OnBeforeTrackSerialize() { }", "docstring": "/// ", "url": "https://github.com/526077247/GenshinGamePlay/blob/5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7/Packages/com.unity.timeline/Runtime/AssetUpgrade/TrackUpgrade.cs#L27-L27", "sha": "5b1e4e62ebdc0b189e158ec55b3d3b4b5e3857d7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DisposeUtility.Wrap", "code": 
"public static IAsyncDisposable Wrap(object? resource) => TryWrap(resource) ?? NoopDisposable.Instance;", "docstring": "/// ", "url": "https://github.com/StephenCleary/StructuredConcurrency/blob/309813256331af44f15ba8add05350428f60c846/src/Nito.StructuredConcurrency/Internals/DisposeUtility.cs#L15-L15", "sha": "309813256331af44f15ba8add05350428f60c846"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "InterlockedEx.Apply", "code": "public static T Apply(ref T value, Func transformation)\n where T : class\n {\n _ = transformation ?? throw new ArgumentNullException(nameof(transformation));\n\n while (true)\n {\n var localValue = Interlocked.CompareExchange(ref value, null!, null!);\n var modified = transformation(localValue);\n if (Interlocked.CompareExchange(ref value, modified, localValue) == localValue)\n return modified;\n }\n }", "docstring": "/// ", "url": "https://github.com/StephenCleary/StructuredConcurrency/blob/309813256331af44f15ba8add05350428f60c846/src/Nito.StructuredConcurrency/Internals/InterlockedEx.cs#L17-L29", "sha": "309813256331af44f15ba8add05350428f60c846"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LogRedirection.GetStackTrace", "code": "private static string GetStackTrace()\n {\n // 通过反射获取ConsoleWindow类\n var consoleWindowType = typeof(EditorWindow).Assembly.GetType(\"UnityEditor.ConsoleWindow\");\n // 获取窗口实例\n var fieldInfo = consoleWindowType.GetField(\"ms_ConsoleWindow\",\n BindingFlags.Static |\n BindingFlags.NonPublic);\n if (fieldInfo != null)\n {\n var consoleInstance = fieldInfo.GetValue(null);\n if (consoleInstance != null)\n if (EditorWindow.focusedWindow == (EditorWindow)consoleInstance)\n {\n // 获取m_ActiveText成员\n fieldInfo = consoleWindowType.GetField(\"m_ActiveText\",\n BindingFlags.Instance |\n BindingFlags.NonPublic);\n // 获取m_ActiveText的值\n if (fieldInfo != null)\n {\n var activeText = fieldInfo.GetValue(consoleInstance).ToString();\n return activeText;\n }\n }\n }\n\n return null;\n }", "docstring": "/// ", "url": "https://github.com/Alex-Rachel/GameFramework-Next/blob/4f2fbfa6fc0eb0bfa55c92fd51df295eae3de4b3/UnityProject/Assets/GameScripts/Editor/Extension/Utility/LogRedirection.cs#L95-L123", "sha": "4f2fbfa6fc0eb0bfa55c92fd51df295eae3de4b3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ReleaseTools.CreateEncryptionInstance", "code": "private static IEncryptionServices CreateEncryptionInstance(string packageName, EBuildPipeline buildPipeline)\n {\n var encryptionClassName = AssetBundleBuilderSetting.GetPackageEncyptionClassName(packageName, buildPipeline);\n var encryptionClassTypes = EditorTools.GetAssignableTypes(typeof(IEncryptionServices));\n var classType = encryptionClassTypes.Find(x => x.FullName != null && x.FullName.Equals(encryptionClassName));\n if (classType != null)\n {\n Debug.Log($\"Use Encryption {classType}\");\n return (IEncryptionServices)Activator.CreateInstance(classType);\n }\n else\n {\n return null;\n }\n }", "docstring": "/// ", "url": "https://github.com/Alex-Rachel/GameFramework-Next/blob/4f2fbfa6fc0eb0bfa55c92fd51df295eae3de4b3/UnityProject/Assets/GameScripts/Editor/ReleaseTools/ReleaseTools.cs#L165-L179", "sha": "4f2fbfa6fc0eb0bfa55c92fd51df295eae3de4b3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GameApp.Entrance", "code": "public static void Entrance(object[] objects)\n {\n s_HotfixAssembly = (List)objects[0];\n Log.Warning(\"======= 看到此条日志代表你成功运行了热更新代码 
=======\");\n Log.Warning(\"======= Entrance GameApp =======\");\n Instance.InitSystem();\n Instance.Start();\n Utility.Unity.AddUpdateListener(Instance.Update);\n Utility.Unity.AddFixedUpdateListener(Instance.FixedUpdate);\n Utility.Unity.AddLateUpdateListener(Instance.LateUpdate);\n Utility.Unity.AddDestroyListener(Instance.OnDestroy);\n Utility.Unity.AddOnDrawGizmosListener(Instance.OnDrawGizmos);\n Utility.Unity.AddOnApplicationPauseListener(Instance.OnApplicationPause);\n Instance.StartGameLogic();\n }", "docstring": "/// ", "url": "https://github.com/Alex-Rachel/GameFramework-Next/blob/4f2fbfa6fc0eb0bfa55c92fd51df295eae3de4b3/UnityProject/Assets/GameScripts/HotFix/GameLogic/GameApp.cs#L15-L29", "sha": "4f2fbfa6fc0eb0bfa55c92fd51df295eae3de4b3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AggregateIdValueConverter.Create", "code": "private static TAggregateId Create(TId id) =>\n (\n Activator.CreateInstance(\n typeof(TAggregateId),\n BindingFlags.Instance | BindingFlags.NonPublic,\n null,\n new object?[] { id },\n null,\n null\n ) as TAggregateId\n )!;", "docstring": "// instantiate AggregateId and pass id to its protected or private constructor", "url": "https://github.com/mehdihadeli/vertical-slice-api-template/blob/3837aefd7d6ed6bf8e32faca932a145f25f0dfb4/src/Shared/EF/Converters/AggregateIdValueConverter.cs#L15-L25", "sha": "3837aefd7d6ed6bf8e32faca932a145f25f0dfb4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CornerFrameLayout.SetRadius", "code": "public void SetRadius(float topLeft, float topRight, float bottomRight, float bottomLeft)\n {\n mRadii[0] = topLeft;\n mRadii[1] = topLeft;\n\n mRadii[2] = topRight;\n mRadii[3] = topRight;\n\n mRadii[4] = bottomRight;\n mRadii[5] = bottomRight;\n\n mRadii[6] = bottomLeft;\n mRadii[7] = bottomLeft;\n Invalidate();\n }", "docstring": "/// ", "url": "https://github.com/sswi/AcrylicView.MAUI/blob/a70ec049b39ba9f5300f27c16d4bf1539e00c373/AcrylicView/Platforms/Android/Drawable/CornerFrameLayout.cs#L47-L61", "sha": "a70ec049b39ba9f5300f27c16d4bf1539e00c373"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StateMachineComponent.SwitchState", "code": "private void SwitchState(State newState, bool callExit = true)\n {\n if (callExit)\n {\n _curState.Exit();\n }\n\n _curState = newState;\n _curState.Enter();\n }", "docstring": "/// ", "url": "https://github.com/CSharpRedotTools/Template/blob/d1b39583643c63beeda68c629ecc6c76cf98df4c/Genres/2D Top Down/Scripts/Components/StateMachineComponent.cs#L39-L48", "sha": "d1b39583643c63beeda68c629ecc6c76cf98df4c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GalStatusSyncResultHelper.ToInfoBarSeverity", "code": "public static InfoBarSeverity ToInfoBarSeverity(this GalStatusSyncResult result)\n {\n switch (result)\n {\n case GalStatusSyncResult.Ok:\n return InfoBarSeverity.Success;\n case GalStatusSyncResult.UnAuthorized:\n case GalStatusSyncResult.NoId:\n case GalStatusSyncResult.Other:\n return InfoBarSeverity.Error;\n case GalStatusSyncResult.NotSupported:\n return InfoBarSeverity.Warning;\n default:\n return InfoBarSeverity.Error;\n }\n }", "docstring": "/// ", "url": "https://github.com/GoldenPotato137/PotatoVN/blob/e8db00158d27c07f868cae89d9aaa5a5bd9c3349/GalgameManager/Enums/GalStatusSyncResult.cs#L24-L39", "sha": "e8db00158d27c07f868cae89d9aaa5a5bd9c3349"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"NavigationHelper.NavigateToHomePage", "code": "public static void NavigateToHomePage(INavigationService navigationService, IFilterService? filterService = null,\n IEnumerable? filters = null)\n {\n Debug.Assert(!(filterService is null ^ filters is null)); // 同时为null或同时不为null\n if (filterService is not null && filters is not null)\n {\n filterService.ClearFilters();\n foreach (FilterBase filter in filters)\n {\n filterService.AddFilter(filter);\n if (filter is CategoryFilter c)\n c.Category.LastClicked = DateTime.Now;\n if (filter is SourceFilter s)\n s.Source.LastClicked = DateTime.Now;\n \n }\n }\n navigationService.NavigateTo(typeof(HomeViewModel).FullName!);\n }", "docstring": "/// ", "url": "https://github.com/GoldenPotato137/PotatoVN/blob/e8db00158d27c07f868cae89d9aaa5a5bd9c3349/GalgameManager/Helpers/NavigationHelper.cs#L34-L52", "sha": "e8db00158d27c07f868cae89d9aaa5a5bd9c3349"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TimeToDisplayTimeConverter.Convert", "code": "public static string Convert(int value)\n {\n var timeAsHour = App.GetService().ReadSettingAsync(KeyValues.TimeAsHour).Result;\n if (timeAsHour)\n return value > 60 ? $\"{value / 60}h{value % 60}m\" : $\"{value}m\";\n return $\"{value} {\"Minute\".GetLocalized()}\";\n }", "docstring": "//不需要", "url": "https://github.com/GoldenPotato137/PotatoVN/blob/e8db00158d27c07f868cae89d9aaa5a5bd9c3349/GalgameManager/Helpers/Converter/TimeToDisplayTimeConverter.cs#L24-L30", "sha": "e8db00158d27c07f868cae89d9aaa5a5bd9c3349"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ObservableCollectionExtensions.SyncCollection", "code": "public static void SyncCollection(this ObservableCollection collection, IList other, bool sort = false)\n where T : notnull\n {\n // var delta = other.Count - collection.Count;\n // for (var i = 0; i < delta; i++)\n // collection.Add(other[0]); //内容不总要,只是要填充到对应的总数\n // for (var i = delta; i < 0; i++)\n // collection.RemoveAt(collection.Count - 1);\n //\n // for (var i = 0; i < other.Count; i++) \n // collection[i] = other[i];\n\n HashSet toRemove = new(collection.Where(obj => !other.Contains(obj)));\n HashSet toAdd = new(other.Where(obj => !collection.Contains(obj)));\n foreach (T obj in toRemove)\n collection.Remove(obj);\n foreach (T obj in toAdd)\n collection.Add(obj);\n\n if (!sort) return;\n Dictionary index = new();\n for (var i = 0; i < other.Count; i++)\n index[other[i]] = i;\n collection.Sort((a, b) => index[a].CompareTo(index[b]));\n }", "docstring": "/// ", "url": "https://github.com/GoldenPotato137/PotatoVN/blob/e8db00158d27c07f868cae89d9aaa5a5bd9c3349/GalgameManager/Helpers/Extensions/ObservableCollectionExtensions.cs#L14-L38", "sha": "e8db00158d27c07f868cae89d9aaa5a5bd9c3349"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GalgameUid.Similarity", "code": "public int Similarity(GalgameUid? rhs)\n {\n if (rhs is null) return 0;\n var result = 0;\n result += !PvnId.IsNullOrEmpty() && PvnId == rhs.PvnId ? 1 : 0;\n result += !BangumiId.IsNullOrEmpty() && BangumiId == rhs.BangumiId ? 1 : 0;\n result += !VndbId.IsNullOrEmpty() && VndbId == rhs.VndbId ? 1 : 0;\n result += !CnName.IsNullOrEmpty() && CnName == rhs.CnName ? 1 : 0;\n result += Name == rhs.Name ? 
1 : 0;\n return result;\n }", "docstring": "/// ", "url": "https://github.com/GoldenPotato137/PotatoVN/blob/e8db00158d27c07f868cae89d9aaa5a5bd9c3349/GalgameManager/Models/GalgameUid.cs#L24-L34", "sha": "e8db00158d27c07f868cae89d9aaa5a5bd9c3349"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Program.Main", "code": "static void Main(string[] args)\n\t{\n\t\t// if you want to use a different Application Delegate class from \"AppDelegate\"\n\t\t// you can specify it here.\n\t\tUIApplication.Main(args, null, typeof(AppDelegate));\n\t}", "docstring": "// This is the main entry point of the application.", "url": "https://github.com/afriscic/BarcodeScanning.Native.Maui/blob/dd51248cb791fe8b7099b5d06b6f71c9d766ea81/BarcodeScanning.Test/Platforms/iOS/Program.cs#L9-L14", "sha": "dd51248cb791fe8b7099b5d06b6f71c9d766ea81"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PSConsoleReadLine.ViReplaceLine", "code": "public static void ViReplaceLine(ConsoleKeyInfo? key = null, object arg = null)\n {\n _singleton._groupUndoHelper.StartGroup(ViReplaceLine, arg);\n DeleteLine(key, arg);\n ViInsertMode(key, arg);\n }", "docstring": "/// ", "url": "https://github.com/PowerShell/AIShell/blob/8fc4c0dc16cdcf5aaefefec24e3b10ecc85c3ad0/shell/ReadLine/Replace.vi.cs#L138-L143", "sha": "8fc4c0dc16cdcf5aaefefec24e3b10ecc85c3ad0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ReplaceCommand.RegenerateAsync", "code": "private async Task RegenerateAsync()\n {\n ArgumentPlaceholder ap = _agent.ArgPlaceholder;\n\n // We are doing the replacement locally, but want to fake the regeneration.\n await Task.Delay(2000, Shell.CancellationToken);\n\n ResponseData data = ap.ResponseData;\n _agent.ReplaceKnownPlaceholders(data);\n\n if (data.PlaceholderSet is null)\n {\n _agent.ResetArgumentPlaceholder();\n }\n\n return _agent.GenerateAnswer(data, Shell);\n }", "docstring": "/// ", "url": "https://github.com/PowerShell/AIShell/blob/8fc4c0dc16cdcf5aaefefec24e3b10ecc85c3ad0/shell/agents/Microsoft.Azure.Agent/Command.cs#L260-L276", "sha": "8fc4c0dc16cdcf5aaefefec24e3b10ecc85c3ad0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RenderTreeBuilderExtensions.CreateComponent", "code": "public static void CreateComponent(this RenderTreeBuilder builder) where T : IComponent\n {\n var sequence = 0;\n CreateComponent(builder, ref sequence);\n }", "docstring": "/// ", "url": "https://github.com/elsa-workflows/elsa-studio/blob/95c37625ffcdfde09f1a3244a386b0ab005cdd82/src/framework/Elsa.Studio.Core/Extensions/RenderTreeBuilderExtensions.cs#L14-L18", "sha": "95c37625ffcdfde09f1a3244a386b0ab005cdd82"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ServiceCollectionExtensions.AddLoginModuleCore", "code": "public static IServiceCollection AddLoginModuleCore(this IServiceCollection services)\n {\n return services\n .AddScoped()\n .AddOptions()\n .AddAuthorizationCore()\n .AddScoped()\n .AddScoped()\n .AddScoped()\n .AddScoped()\n ;\n }", "docstring": "/// ", "url": "https://github.com/elsa-workflows/elsa-studio/blob/95c37625ffcdfde09f1a3244a386b0ab005cdd82/src/modules/Elsa.Studio.Login/Extensions/ServiceCollectionExtensions.cs#L20-L31", "sha": "95c37625ffcdfde09f1a3244a386b0ab005cdd82"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "InputDescriptorCheckListExtensions.GetCheckList", "code": "public static CheckList GetCheckList(this 
InputDescriptor descriptor)\n {\n var specifications = descriptor.UISpecifications;\n var props = specifications != null ? specifications.TryGetValue(\"checklist\", out var propsValue) ? propsValue is JsonElement value ? value : default : default : default;\n\n if (props.ValueKind == JsonValueKind.Undefined)\n return new CheckList(Array.Empty());\n\n var serializerOptions = new JsonSerializerOptions\n {\n PropertyNamingPolicy = JsonNamingPolicy.CamelCase,\n };\n\n var checkListProps = props.Deserialize(serializerOptions);\n return checkListProps?.CheckList ?? new CheckList(Array.Empty());\n }", "docstring": "/// ", "url": "https://github.com/elsa-workflows/elsa-studio/blob/95c37625ffcdfde09f1a3244a386b0ab005cdd82/src/modules/Elsa.Studio.UIHints/Extensions/InputDescriptorCheckListExtensions.cs#L15-L30", "sha": "95c37625ffcdfde09f1a3244a386b0ab005cdd82"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RemoteLogPersistenceStrategyService.GetLogPersistenceStrategiesAsync", "code": "public async Task> GetLogPersistenceStrategiesAsync(CancellationToken cancellationToken = default)\n {\n if (_descriptors == null)\n {\n var api = await backendApiClientProvider.GetApiAsync(cancellationToken);\n var response = await api.ListAsync(cancellationToken);\n _descriptors = response.Items;\n }\n\n return _descriptors;\n }", "docstring": "/// ", "url": "https://github.com/elsa-workflows/elsa-studio/blob/95c37625ffcdfde09f1a3244a386b0ab005cdd82/src/modules/Elsa.Studio.Workflows.Core/Domain/Services/RemoteLogPersistenceStrategyService.cs#L13-L23", "sha": "95c37625ffcdfde09f1a3244a386b0ab005cdd82"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RemoteWorkflowDefinitionService.ListAsync", "code": "public async Task> ListAsync(ListWorkflowDefinitionsRequest request, VersionOptions? versionOptions = null, CancellationToken cancellationToken = default)\n {\n var api = await GetApiAsync(cancellationToken);\n return await api.ListAsync(request, versionOptions, cancellationToken);\n }", "docstring": "/// ", "url": "https://github.com/elsa-workflows/elsa-studio/blob/95c37625ffcdfde09f1a3244a386b0ab005cdd82/src/modules/Elsa.Studio.Workflows.Core/Domain/Services/WorkflowDefinitionService.cs#L25-L29", "sha": "95c37625ffcdfde09f1a3244a386b0ab005cdd82"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HttpContextExtensions.IsAspNetStaticRequest", "code": "public static bool IsAspNetStaticRequest(this HttpRequest request) =>\n\t\t\t(request is not null) &&\n\t\t\trequest.Headers.TryGetValue(HeaderNames.UserAgent, out var ua) &&\n\t\t\tua.Contains(Consts.AspNetStatic);", "docstring": "/// ", "url": "https://github.com/ZarehD/AspNetStatic/blob/25aced55cddc84eaf910363ec377ffef99853e7b/src/AspNetStatic/HttpContextExtensions.cs#L33-L36", "sha": "25aced55cddc84eaf910363ec377ffef99853e7b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StaticGeneratorHostExtension.GenerateStaticContent", "code": "public static void GenerateStaticContent(\n\t\t\tthis IHost host,\n\t\t\tstring destinationRoot,\n\t\t\tbool exitWhenDone = default,\n\t\t\tbool alwaysDefaultFile = default,\n\t\t\tbool dontUpdateLinks = default,\n\t\t\tbool dontOptimizeContent = default,\n\t\t\tTimeSpan? 
regenerationInterval = default,\n\t\t\tulong httpTimeoutSeconds = c_DefaultHttpTimeoutSeconds)\n\t\t{\n\t\t\tThrow.IfNull(host);\n\t\t\tThrow.IfNullOrWhitespace(destinationRoot);\n\n\t\t\tvar fileSystem = host.Services.GetService() ?? new FileSystem();\n\n\t\t\tThrow.DirectoryNotFoundWhen(\n\t\t\t\t() => !fileSystem.Directory.Exists(destinationRoot),\n\t\t\t\tSR.Err_InvalidDestinationRoot);\n\n\t\t\tvar loggerFactory = host.Services.GetRequiredService();\n\t\t\tvar logger = loggerFactory.CreateLogger(nameof(StaticGeneratorHostExtension));\n\n\t\t\tvar pageUrlProvider = host.Services.GetRequiredService();\n\n\t\t\tif (!pageUrlProvider.PageResources.Any())\n\t\t\t{\n\t\t\t\tlogger.NoPagesToProcess();\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tvar optimizerSelector = GetOptimizerSelector(host.Services, dontOptimizeContent);\n\n\t\t\tvar lifetime = host.Services.GetRequiredService();\n\n\t\t\tlifetime.ApplicationStopping.Register(_appShutdown.Cancel);\n\n\t\t\tlifetime.ApplicationStarted.Register(\n\t\t\t\tasync () =>\n\t\t\t\t{\n\t\t\t\t\ttry\n\t\t\t\t\t{\n\t\t\t\t\t\t_httpClient.BaseAddress = new Uri(GetBaseUri(host));\n\t\t\t\t\t\t_httpClient.Timeout = TimeSpan.FromSeconds(httpTimeoutSeconds);\n\t\t\t\t\t\t_httpClient.DefaultRequestHeaders.Add(HeaderNames.UserAgent, Consts.AspNetStatic);\n\n\t\t\t\t\t\tvar generatorConfig =\n\t\t\t\t\t\t\tnew StaticGeneratorConfig(\n\t\t\t\t\t\t\t\tpageUrlProvider.Resources,\n\t\t\t\t\t\t\t\tdestinationRoot,\n\t\t\t\t\t\t\t\talwaysDefaultFile,\n\t\t\t\t\t\t\t\t!dontUpdateLinks,\n\t\t\t\t\t\t\t\tpageUrlProvider.DefaultFileName,\n\t\t\t\t\t\t\t\tpageUrlProvider.PageFileExtension.EnsureStartsWith('.'),\n\t\t\t\t\t\t\t\tpageUrlProvider.DefaultFileExclusions,\n\t\t\t\t\t\t\t\t!dontOptimizeContent, optimizerSelector,\n\t\t\t\t\t\t\t\tpageUrlProvider.SkipPageResources,\n\t\t\t\t\t\t\t\tpageUrlProvider.SkipCssResources,\n\t\t\t\t\t\t\t\tpageUrlProvider.SkipJsResources,\n\t\t\t\t\t\t\t\tpageUrlProvider.SkipBinResources);\n\n\t\t\t\t\t\tlogger.RegenerationConfig(regenerationInterval);\n\t\t\t\t\t\tvar doPeriodicRefresh = regenerationInterval is not null;\n\n#if USE_PERIODIC_TIMER\n\t\t\t\t\t\tif (!_appShutdown.IsCancellationRequested)\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tif (doPeriodicRefresh) _timer = new(regenerationInterval!.Value);\n\t\t\t\t\t\t\tdo\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tawait StaticGenerator.Execute(\n\t\t\t\t\t\t\t\t\tgeneratorConfig,\n\t\t\t\t\t\t\t\t\t_httpClient, fileSystem,\n\t\t\t\t\t\t\t\t\tloggerFactory, _appShutdown.Token)\n\t\t\t\t\t\t\t\t.ConfigureAwait(false);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\twhile (doPeriodicRefresh && await _timer.WaitForNextTickAsync(_appShutdown.Token).ConfigureAwait(false));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (exitWhenDone && !doPeriodicRefresh && !_appShutdown.IsCancellationRequested)\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tlogger.Exiting();\n\t\t\t\t\t\t\tawait Task.Delay(500).ConfigureAwait(false);\n\t\t\t\t\t\t\tawait host.StopAsync().ConfigureAwait(false);\n\t\t\t\t\t\t}\n#else\n\t\t\t\t\t\tawait StaticPageGenerator.Execute(\n\t\t\t\t\t\t\tgeneratorConfig,\n\t\t\t\t\t\t\t_httpClient, fileSystem,\n\t\t\t\t\t\t\tloggerFactory, _appShutdown.Token)\n\t\t\t\t\t\t\t.ConfigureAwait(false);\n\n\t\t\t\t\t\tif (doPeriodicRefresh && !_appShutdown.IsCancellationRequested)\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\t_timer.Interval = regenerationInterval!.Value.TotalMilliseconds;\n\t\t\t\t\t\t\t_timer.Elapsed +=\n\t\t\t\t\t\t\t\tasync (s, e) =>\n\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\tawait 
StaticPageGenerator.Execute(\n\t\t\t\t\t\t\t\t\t\tgeneratorConfig,\n\t\t\t\t\t\t\t\t\t\t_httpClient, fileSystem,\n\t\t\t\t\t\t\t\t\t\tloggerFactory, _appShutdown.Token)\n\t\t\t\t\t\t\t\t\t\t.ConfigureAwait(false);\n\n\t\t\t\t\t\t\t\t\tif (_appShutdown.IsCancellationRequested)\n\t\t\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\t\t\t_timer.Stop();\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\t_timer.Start();\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (_appShutdown.IsCancellationRequested)\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\t_timer.Stop();\n\t\t\t\t\t\t}\n\t\t\t\t\t\telse if (exitWhenDone && !doPeriodicRefresh)\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tlogger.Exiting();\n\t\t\t\t\t\t\tawait Task.Delay(500).ConfigureAwait(false);\n\t\t\t\t\t\t\tawait host.StopAsync().ConfigureAwait(false);\n\t\t\t\t\t\t}\n#endif\n\t\t\t\t\t}\n\t\t\t\t\tcatch (OperationCanceledException) { }\n\t\t\t\t\tcatch (Exception ex)\n\t\t\t\t\t{\n\t\t\t\t\t\tlogger.Exception(ex);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t}", "docstring": "/// ", "url": "https://github.com/ZarehD/AspNetStatic/blob/25aced55cddc84eaf910363ec377ffef99853e7b/src/AspNetStatic/StaticGeneratorHostExtension.cs#L86-L215", "sha": "25aced55cddc84eaf910363ec377ffef99853e7b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StringExtensions.ToCitationUrl", "code": "internal static string ToCitationUrl(this string fileName, string baseUrl)\n {\n var builder = new UriBuilder(baseUrl);\n builder.Path += $\"/{fileName}\";\n builder.Fragment = \"view-fitV\";\n\n return builder.Uri.AbsoluteUri;\n }", "docstring": "/// ", "url": "https://github.com/Azure-Samples/azure-search-openai-demo-csharp/blob/fd451e3cef53fe0c65ae8c283d95a389e4f203bd/app/frontend/Extensions/StringExtensions.cs#L11-L18", "sha": "fd451e3cef53fe0c65ae8c283d95a389e4f203bd"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DeveloperIdTests.LoginUI_ControllerPATLoginTest_Success", "code": "[TestMethod]\n [TestCategory(\"LiveData\")]\n public async Task LoginUI_ControllerPATLoginTest_Success()\n {\n // Create DataRows during Runtime since these need Env vars\n RuntimeDataRow[] dataRows =\n {\n new()\n {\n InitialState = \"EnterpriseServerPATPage\",\n Actions = LoginUITestData.ConnectButtonAction,\n Inputs = LoginUITestData.GoodPATEnterpriseServerPATInput,\n FinalState = \"LoginSucceededPage\",\n HostAddress = Environment.GetEnvironmentVariable(\"DEV_HOME_TEST_GITHUB_ENTERPRISE_SERVER\"),\n },\n new()\n {\n InitialState = \"EnterpriseServerPATPage\",\n Actions = LoginUITestData.ConnectButtonAction,\n Inputs = LoginUITestData.GoodPATGithubComPATInput,\n FinalState = \"LoginSucceededPage\",\n HostAddress = \"https://api.github.com\",\n },\n }\n ;\n\n foreach (RuntimeDataRow dataRow in dataRows)\n {\n var testExtensionAdaptiveCard = new MockExtensionAdaptiveCard(string.Empty, string.Empty, string.Empty);\n Assert.AreEqual(0, testExtensionAdaptiveCard.UpdateCount);\n\n // Create a LoginUIController and initialize it with the testExtensionAdaptiveCard.\n var controller = new LoginUIController(MockDeveloperIdProvider.GetInstance());\n Assert.AreEqual(ProviderOperationStatus.Success, controller.Initialize(testExtensionAdaptiveCard).Status);\n Assert.AreEqual(1, testExtensionAdaptiveCard.UpdateCount);\n\n // Set the initial state.\n testExtensionAdaptiveCard.State = dataRow.InitialState ?? 
string.Empty;\n Assert.AreEqual(dataRow.InitialState, testExtensionAdaptiveCard.State);\n\n // Set HostAddress for EnterpriseServerPATPage to make this a valid state\n if (dataRow.InitialState == \"EnterpriseServerPATPage\")\n {\n controller.HostAddress = new Uri(dataRow.HostAddress ?? string.Empty);\n Assert.AreEqual(dataRow.HostAddress, controller.HostAddress.OriginalString);\n }\n\n // Call OnAction() with the actions and inputs.\n Assert.AreEqual(ProviderOperationStatus.Success, (await controller.OnAction(dataRow.Actions ?? string.Empty, dataRow.Inputs ?? string.Empty)).Status);\n\n // Verify the final state\n Assert.AreEqual(dataRow.FinalState, testExtensionAdaptiveCard.State);\n Assert.AreEqual(2, testExtensionAdaptiveCard.UpdateCount);\n\n controller.Dispose();\n }\n }", "docstring": "/* This test requires the following environment variables to be set:\n * DEV_HOME_TEST_GITHUB_ENTERPRISE_SERVER : The host address of the GitHub Enterprise Server to test against\n * DEV_HOME_TEST_GITHUB_COM_PAT : A valid Personal Access Token for GitHub.com (with at least repo_public permissions)\n * DEV_HOME_TEST_GITHUB_ENTERPRISE_SERVER_PAT : A valid Personal Access Token for the GitHub Enterprise Server set in DEV_HOME_TEST_GITHUB_ENTERPRISE_SERVER (with at least repo_public permissions)\n */", "url": "https://github.com/microsoft/devhomegithubextension/blob/054645220c7e9e71dd7c75a9781d36964af346c0/test/GitHubExtension/DeveloperId/LoginUITests.cs#L149-L205", "sha": "054645220c7e9e71dd7c75a9781d36964af346c0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "POffsetInfoType_FbArray.IsMDim", "code": "public override bool IsMDim() { return false; }", "docstring": "//!!! TODO", "url": "https://github.com/thomas-v2/S7CommPlusDriver/blob/76ca5e78cba06628146591277adf3dfafd62ca63/src/S7CommPlusDriver/Core/POffsetInfoType.cs#L120-L120", "sha": "76ca5e78cba06628146591277adf3dfafd62ca63"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WebView2Ex.RegisterDragDropEvents", "code": "void RegisterDragDropEvents()\n {\n var manager = CoreDragDropManager.GetForCurrentView();\n manager.TargetRequested += TargetRequested;\n }", "docstring": "//LinkedListNode? thisNode;", "url": "https://github.com/JitHubApp/JitHubV2/blob/01d7d884407b4b92a51cf0a95c08e95ed2acde5d/JitHub.WebView/UI/WebView2Ex.DragDrop.cs#L26-L30", "sha": "01d7d884407b4b92a51cf0a95c08e95ed2acde5d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IntParameter.Interp", "code": "public override void Interp(int from, int to, float t)\n {\n // Int snapping interpolation. Don't use this for enums as they don't necessarily have\n // contiguous values. Use the default interpolator instead (same as bool).\n value = (int)(from + (to - from) * t);\n }", "docstring": "/// ", "url": "https://github.com/ndepoel/FSR3Unity/blob/71879e71aa72465a79bdf009d7d71f9f7ab83066/Packages/com.unity.postprocessing/PostProcessing/Runtime/ParameterOverride.cs#L233-L238", "sha": "71879e71aa72465a79bdf009d7d71f9f7ab83066"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ParserTestBase.ParseSqlStatements", "code": "public Sequence ParseSqlStatements(string sql, IEnumerable dialects, bool unescape = false, ParserOptions? 
options = null)\n {\n options ??= new ParserOptions { Unescape = unescape };\n return OneOfIdenticalResults(dialect =>\n {\n options.TrailingCommas |= dialect.SupportsTrailingCommas;\n return new Parser().ParseSql(sql, dialect, options);\n }, dialects)!;\n }", "docstring": "// Ensures that `sql` parses as a single statement and returns it.", "url": "https://github.com/TylerBrinks/SqlParser-cs/blob/66aea72a1cb0ed72e3f50cc4bf83326de41b93c2/src/SqlParser.Tests/ParserTestBase.cs#L172-L180", "sha": "66aea72a1cb0ed72e3f50cc4bf83326de41b93c2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fictioneer_build_customize_css", "code": "function fictioneer_build_customize_css( $context = null ) {\n // --- Setup -----------------------------------------------------------------\n\n $file_path = fictioneer_get_theme_cache_dir( 'build_customize_css' ) . '/customize.css';\n $site_width = (int) get_theme_mod( 'site_width', FICTIONEER_DEFAULT_SITE_WIDTH );\n $header_image_style = get_theme_mod( 'header_image_style', 'default' );\n $header_style = get_theme_mod( 'header_style', 'default' );\n $content_list_style = get_theme_mod( 'content_list_style', 'default' );\n $page_style = get_theme_mod( 'page_style', 'default' );\n $card_style = get_theme_mod( 'card_style', 'default' );\n $card_frame = get_theme_mod( 'card_frame', 'default' );\n $footer_style = get_theme_mod( 'footer_style', 'default' );\n $sidebar_style = get_theme_mod( 'sidebar_style', 'none' );\n $css = '';\n\n if ( $context === 'preview' ) {\n $file_path = fictioneer_get_theme_cache_dir( 'preview' ) . '/customize-preview.css';\n }\n\n // --- Assets ----------------------------------------------------------------\n\n if (\n $header_image_style === 'polygon-battered' ||\n in_array( $page_style, ['polygon-battered', 'polygon-mask-image-battered-ringbook'] )\n ) {\n $css .= fictioneer_get_customizer_css_snippet( 'polygon-battered' );\n }\n\n // --- Header image style ----------------------------------------------------\n\n if ( $header_image_style === 'polygon-battered' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'header-image-style-battered' );\n }\n\n if ( $header_image_style === 'polygon-chamfered' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'header-image-style-chamfered' );\n }\n\n if ( $header_image_style === 'mask-grunge-frame-a-large' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'header-image-style-grunge-frame-a-large' );\n }\n\n if ( $header_image_style === 'mask-grunge-frame-a-small' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'header-image-style-grunge-frame-a-small' );\n }\n\n // --- Fading header image ---------------------------------------------------\n\n $header_image_fading_start = fictioneer_sanitize_integer( get_theme_mod( 'header_image_fading_start', 0 ), 0, 0, 99 );\n $header_image_fading_breakpoint = (int) get_theme_mod( 'header_image_fading_breakpoint', 0 );\n\n if ( $header_image_fading_start > 0 ) {\n if ( $header_image_fading_breakpoint > 320 ) {\n $css .= \"@media only screen and (min-width: {$header_image_fading_breakpoint}px) {\n :root {\n --header-fading-mask-image: \" . fictioneer_get_fading_gradient( 100, $header_image_fading_start, 100, 'var(--header-fading-mask-image-rotation, 180deg)' ) . \";\n }\n }\";\n } else {\n $css .= \":root {\n --header-fading-mask-image: \" . fictioneer_get_fading_gradient( 100, $header_image_fading_start, 100, 'var(--header-fading-mask-image-rotation, 180deg)' ) . 
\";\n }\";\n }\n\n $css .= '@media only screen and (min-width: 1024px) {\n .inset-header-image .header-background._style-default._fading-bottom._shadow .header-background__wrapper {\n margin-left: 4px;\n margin-right: 4px;\n }\n }';\n }\n\n // --- Inset header image ----------------------------------------------------\n\n if ( get_theme_mod( 'inset_header_image' ) ) {\n $css .= fictioneer_get_customizer_css_snippet( 'inset-header-image' );\n }\n\n // --- Base layout -----------------------------------------------------------\n\n $hue_offset_dark = (int) get_theme_mod( 'hue_offset', 0 );\n $saturation_offset_dark = (int) get_theme_mod( 'saturation_offset', 0 );\n $lightness_offset_dark = (int) get_theme_mod( 'lightness_offset', 0 );\n $font_saturation_offset_dark = (int) get_theme_mod( 'font_saturation_offset', 0 );\n $font_lightness_offset_dark = (int) get_theme_mod( 'font_lightness_offset', 0 );\n $hue_offset_light = (int) get_theme_mod( 'hue_offset_light', 0 );\n $saturation_offset_light = (int) get_theme_mod( 'saturation_offset_light', 0 );\n $lightness_offset_light = (int) get_theme_mod( 'lightness_offset_light', 0 );\n $font_saturation_offset_light = (int) get_theme_mod( 'font_saturation_offset_light', 0 );\n $font_lightness_offset_light = (int) get_theme_mod( 'font_lightness_offset_light', 0 );\n $site_width = (int) get_theme_mod( 'site_width', FICTIONEER_DEFAULT_SITE_WIDTH );\n $main_offset = (int) get_theme_mod( 'main_offset', 0 );\n $sidebar_width = (int) get_theme_mod( 'sidebar_width', 256 );\n $sidebar_gap = (int) get_theme_mod( 'sidebar_gap', 48 );\n $logo_min_height = (int) get_theme_mod( 'logo_min_height', 210 );\n $logo_max_height = (int) get_theme_mod( 'logo_height', 210 );\n $title_min = (int) get_theme_mod( 'site_title_font_size_min', 32 );\n $title_max = (int) get_theme_mod( 'site_title_font_size_max', 60 );\n $tagline_min = (int) get_theme_mod( 'site_tagline_font_size_min', 13 );\n $tagline_max = (int) get_theme_mod( 'site_tagline_font_size_max', 18 );\n $header_image_min = (int) get_theme_mod( 'header_image_height_min', 210 );\n $header_image_max = (int) get_theme_mod( 'header_image_height_max', 480 );\n $header_bg_color_light = fictioneer_get_theme_color( 'header_color_light', '' );\n $header_bg_color_dark = fictioneer_get_theme_color( 'header_color_dark', '' );\n $header_min = (int) get_theme_mod( 'header_height_min', 190 );\n $header_max = (int) get_theme_mod( 'header_height_max', 380 );\n $story_cover_width_offset = (int) get_theme_mod( 'story_cover_width_offset', 0 );\n $story_cover_box_shadow = get_theme_mod( 'story_cover_shadow', 'var(--box-shadow-xl)' );\n $card_grid_column_min = (int) get_theme_mod( 'card_grid_column_min', 308 );\n $card_cover_width_mod = get_theme_mod( 'card_cover_width_mod', 1 );\n $card_grid_column_gap_mod = get_theme_mod( 'card_grid_column_gap_mod', 1 );\n $card_grid_row_gap_mod = get_theme_mod( 'card_grid_row_gap_mod', 1 );\n $card_font_size_min_mod = get_theme_mod( 'card_font_size_min_mod', 0 );\n $card_font_size_grow_mod = get_theme_mod( 'card_font_size_grow_mod', 0 );\n $card_font_size_max_mod = get_theme_mod( 'card_font_size_max_mod', 0 );\n $card_box_shadow = get_theme_mod( 'card_shadow', 'var(--box-shadow-m)' );\n $font_primary = fictioneer_get_custom_font( 'primary_font_family_value', 'var(--ff-system)', 'Open Sans' );\n $font_secondary = fictioneer_get_custom_font( 'secondary_font_family_value', 'var(--ff-base)', 'Lato' );\n $font_heading = fictioneer_get_custom_font( 'heading_font_family_value', 'var(--ff-base)', 'Open Sans' );\n 
$font_site_title = fictioneer_get_custom_font( 'site_title_font_family_value', 'var(--ff-heading)', 'default' );\n $font_nav_item = fictioneer_get_custom_font( 'nav_item_font_family_value', 'var(--ff-base)', 'default' );\n $font_story_title = fictioneer_get_custom_font( 'story_title_font_family_value', 'var(--ff-heading)', 'default' );\n $font_chapter_title = fictioneer_get_custom_font( 'chapter_title_font_family_value', 'var(--ff-heading)', 'default' );\n $font_chapter_list_title = fictioneer_get_custom_font( 'chapter_list_title_font_family_value', 'var(--ff-base)', 'default' );\n $font_card_title = fictioneer_get_custom_font( 'card_title_font_family_value', 'var(--ff-heading)', 'default' );\n $font_card_body = fictioneer_get_custom_font( 'card_body_font_family_value', 'var(--ff-note)', 'default' );\n $font_card_list_link = fictioneer_get_custom_font( 'card_list_link_font_family_value', 'var(--ff-note)', 'default' );\n\n $dark_shade = fictioneer_hex_to_rgb( get_theme_mod( 'dark_shade', '000000' ) );\n $dark_shade = is_array( $dark_shade ) ? $dark_shade : [0, 0, 0];\n\n if ( $logo_min_height < $logo_max_height ) {\n $logo_height = fictioneer_get_css_clamp( $logo_min_height, $logo_max_height, 320, $site_width );\n } else {\n $logo_height = $logo_max_height . 'px';\n }\n\n $css .= \":root {\n --site-width: {$site_width}px;\n --main-offset: {$main_offset}px;\n --sidebar-width: {$sidebar_width}px;\n --sidebar-gap: {$sidebar_gap}px;\n --hue-offset: {$hue_offset_dark}deg;\n --saturation-offset: \" . $saturation_offset_dark / 100 . \";\n --lightness-offset: \" . $lightness_offset_dark / 100 . \";\n --font-saturation-offset: \" . $font_saturation_offset_dark / 100 . \";\n --font-lightness-offset: \" . $font_lightness_offset_dark / 100 . \";\n --header-image-height: \" . fictioneer_get_css_clamp( $header_image_min, $header_image_max, 320, $site_width ) . \";\n --header-height: calc(\" . fictioneer_get_css_clamp( $header_min, $header_max, 320, $site_width ) . \" - var(--page-inset-top, 0px));\n --header-logo-height: {$logo_height};\n --header-logo-min-height: {$logo_min_height};\n --header-logo-max-height: {$logo_max_height};\n --site-title-font-size: \" . fictioneer_get_css_clamp( $title_min, $title_max, 320, $site_width ) . \";\n --site-title-tagline-font-size: \" . fictioneer_get_css_clamp( $tagline_min, $tagline_max, 320, $site_width ) . \";\n --grid-columns-min: {$card_grid_column_min}px;\n --grid-columns-row-gap-multiplier: {$card_grid_row_gap_mod};\n --grid-columns-col-gap-multiplier: {$card_grid_column_gap_mod};\n --card-font-size-min-mod: {$card_font_size_min_mod}px;\n --card-font-size-grow-mod: {$card_font_size_grow_mod}px;\n --card-font-size-max-mod: {$card_font_size_max_mod}px;\n --ff-base: {$font_primary};\n --ff-note: {$font_secondary};\n --ff-heading: {$font_heading};\n --ff-site-title: {$font_site_title};\n --ff-story-title: {$font_story_title};\n --ff-chapter-title: {$font_chapter_title};\n --ff-chapter-list-title: {$font_chapter_list_title};\n --ff-card-title: {$font_card_title};\n --ff-card-body: {$font_card_body};\n --ff-card-list-link: {$font_card_list_link};\n --ff-nav-item: {$font_nav_item};\n --card-cover-width-mod: {$card_cover_width_mod};\n --card-box-shadow: {$card_box_shadow};\n --card-drop-shadow: \" . str_replace( 'box-', 'drop-', $card_box_shadow ) . \";\n --story-cover-box-shadow: {$story_cover_box_shadow};\n --recommendation-cover-box-shadow: {$story_cover_box_shadow};\n --floating-cover-image-width: \" . 
fictioneer_get_css_clamp( 56, 200 + $story_cover_width_offset, 320, 768 ) . \";\n --in-content-cover-image-width: \" . fictioneer_get_css_clamp( 100, 200 + $story_cover_width_offset, 375, 768 ) . \";\n }\";\n\n if ( $card_box_shadow === 'none' ) {\n $css .= \".card{box-shadow:none!important;}\";\n }\n\n // Only light mode\n $css .= \":root[data-mode=light] {\n --hue-offset: {$hue_offset_light}deg;\n --saturation-offset: \" . $saturation_offset_light / 100 . \";\n --lightness-offset: \" . $lightness_offset_light / 100 . \";\n --font-saturation-offset: \" . $font_saturation_offset_light / 100 . \";\n --font-lightness-offset: \" . $font_lightness_offset_light / 100 . \";\n }\";\n\n // --- Custom layout ---------------------------------------------------------\n\n if ( $sidebar_style !== 'none' ) {\n $css .= \":root, :root[data-theme=base] {\n --layout-spacing-horizontal: \" . fictioneer_get_css_clamp( 20, 48, 480, $site_width ) . \";\n --layout-spacing-horizontal-small: \" . fictioneer_get_css_clamp( 10, 20, 320, 400 ) . \";\n }\";\n }\n\n if ( get_theme_mod( 'use_custom_layout', false ) ) {\n $vertical_min = (int) get_theme_mod( 'vertical_spacing_min', 24 );\n $vertical_max = (int) get_theme_mod( 'vertical_spacing_max', 48 );\n $horizontal_min = (int) get_theme_mod( 'horizontal_spacing_min', 20 );\n $horizontal_max = (int) get_theme_mod( 'horizontal_spacing_max', 80 );\n $horizontal_small_min = (int) get_theme_mod( 'horizontal_spacing_small_min', 10 );\n $horizontal_small_max = (int) get_theme_mod( 'horizontal_spacing_small_max', 20 );\n $large_border_radius = (int) get_theme_mod( 'large_border_radius', 4 );\n $small_border_radius = (int) get_theme_mod( 'small_border_radius', 2 );\n $nested_border_radius_multiplier = max( 0, get_theme_mod( 'nested_border_radius_multiplier', 1 ) );\n $content_list_gap = (int) get_theme_mod( 'content_list_gap', 4 );\n\n $css .= \":root, :root[data-theme=base] {\n --layout-spacing-vertical: \" . fictioneer_get_css_clamp( $vertical_min, $vertical_max, 480, $site_width ) . \";\n --layout-spacing-horizontal: \" . fictioneer_get_css_clamp( $horizontal_min, $horizontal_max, 480, $site_width, '%' ) . \";\n --layout-spacing-horizontal-small: \" . fictioneer_get_css_clamp( $horizontal_small_min, $horizontal_small_max, 320, 400, '%' ) . \";\n --layout-border-radius-large: {$large_border_radius}px;\n --layout-border-radius-small: {$small_border_radius}px;\n --layout-nested-border-radius-multiplier: {$nested_border_radius_multiplier};\n --chapter-list-gap: {$content_list_gap}px;\n --content-list-gap: {$content_list_gap}px;\n }\";\n\n if ( $sidebar_style !== 'none' ) {\n $css .= \".has-sidebar {\n --layout-spacing-horizontal: \" . fictioneer_get_css_clamp( $horizontal_min, $horizontal_max, 480, $site_width ) . \";\n --layout-spacing-horizontal-small: \" . fictioneer_get_css_clamp( $horizontal_small_min, $horizontal_small_max, 320, 400 ) . 
\";\n }\";\n }\n }\n\n // --- Dark mode font weight adjustment --------------------------------------\n\n if ( get_theme_mod( 'dark_mode_font_weight', 'adjusted' ) === 'normal' ) {\n $css .= \":root[data-font-weight=default]:is(html) {\n --font-smoothing-webkit: subpixel-antialiased;\n --font-smoothing-moz: auto;\n --font-weight-normal: 400;\n --font-weight-semi-strong: 600;\n --font-weight-strong: 600;\n --font-weight-medium: 500;\n --font-weight-heading: 700;\n --font-weight-badge: 600;\n --font-weight-post-meta: 400;\n --font-weight-read-ribbon: 700;\n --font-weight-card-label: 600;\n --font-weight-navigation: 400;\n --font-letter-spacing-base: 0em;\n }\";\n }\n\n // --- Dark mode colors ------------------------------------------------------\n\n $css .= \":root {\n --site-title-heading-color: \" . fictioneer_hsl_font_code( fictioneer_get_theme_color( 'dark_header_title_color' ) ) . \";\n --site-title-tagline-color: \" . fictioneer_hsl_font_code( fictioneer_get_theme_color( 'dark_header_tagline_color' ) ) . \";\n }\";\n\n if ( $header_bg_color_dark ) {\n $css .= \":root {\n --header-background-color: \" . fictioneer_hsl_code( $header_bg_color_dark ) . \";\n }\";\n }\n\n if ( get_theme_mod( 'use_custom_dark_mode', false ) ) {\n $css .= \":root, :root[data-theme=base] {\"\n .\n implode( '', array_map( function( $prop ) {\n return \"--bg-{$prop}-free: \" . fictioneer_hsl_code( fictioneer_get_theme_color( \"dark_bg_{$prop}\" ), 'free' ) . ';';\n }, ['50', '100', '200', '300', '400', '500', '600', '700', '800', '900', '950'] ) )\n .\n \"\n --card-frame-border-color: \" . fictioneer_hsl_code( fictioneer_get_theme_color( 'dark_card_frame' ) ) . \";\n --dark-shade-rgb:\" . implode( ' ', $dark_shade ) . \";\n --primary-400: \" . fictioneer_get_theme_color( 'dark_primary_400' ) . \";\n --primary-500: \" . fictioneer_get_theme_color( 'dark_primary_500' ) . \";\n --primary-600: \" . fictioneer_get_theme_color( 'dark_primary_600' ) . \";\n --red-400: \" . fictioneer_get_theme_color( 'dark_red_400' ) . \";\n --red-500: \" . fictioneer_get_theme_color( 'dark_red_500' ) . \";\n --red-600: \" . fictioneer_get_theme_color( 'dark_red_600' ) . \";\n --green-400: \" . fictioneer_get_theme_color( 'dark_green_400' ) . \";\n --green-500: \" . fictioneer_get_theme_color( 'dark_green_500' ) . \";\n --green-600: \" . fictioneer_get_theme_color( 'dark_green_600' ) . \";\n --theme-color-base: \" . fictioneer_hsl_code( fictioneer_get_theme_color( 'dark_theme_color_base' ), 'values' ) . \";\n --navigation-background: \" . fictioneer_hsl_code( fictioneer_get_theme_color( 'dark_navigation_background_sticky' ) ) . \";\n --bookmark-color-alpha: \" . fictioneer_get_theme_color( 'dark_bookmark_color_alpha' ) . \";\n --bookmark-color-beta: \" . fictioneer_get_theme_color( 'dark_bookmark_color_beta' ) . \";\n --bookmark-color-gamma: \" . fictioneer_get_theme_color( 'dark_bookmark_color_gamma' ) . \";\n --bookmark-color-delta: \" . fictioneer_get_theme_color( 'dark_bookmark_color_delta' ) . \";\n --bookmark-line: \" . fictioneer_get_theme_color( 'dark_bookmark_line_color' ) . \";\n --ins-background: \" . fictioneer_get_theme_color( 'dark_ins_background' ) . \";\n --del-background: \" . fictioneer_get_theme_color( 'dark_del_background' ) . \";\"\n .\n implode( '', array_map( function( $prop ) {\n return \"--badge-{$prop}-background: \" . fictioneer_get_theme_color( \"dark_badge_{$prop}_background\" ) . 
';';\n }, ['generic', 'moderator', 'admin', 'author', 'supporter', 'override'] ) )\n .\n \"}\n :root, :root[data-theme=base], :root .chapter-formatting, :root[data-theme=base] .chapter-formatting {\"\n .\n implode( '', array_map( function( $prop ) {\n return \"--fg-{$prop}: \" . fictioneer_hsl_font_code( fictioneer_get_theme_color( \"dark_fg_{$prop}\" ) ) . ';';\n }, ['100', '200', '300', '400', '500', '600', '700', '800', '900', '950', 'tinted', 'inverted'] ) )\n .\n \"}\";\n }\n\n // --- Light mode colors -----------------------------------------------------\n\n $css .= \":root[data-mode=light] {\n --site-title-heading-color: \" . fictioneer_hsl_font_code( fictioneer_get_theme_color( 'light_header_title_color' ) ) . \";\n --site-title-tagline-color: \" . fictioneer_hsl_font_code( fictioneer_get_theme_color( 'light_header_tagline_color' ) ) . \";\n }\";\n\n if ( $header_bg_color_light ) {\n $css .= \":root[data-mode=light] {\n --header-background-color: \" . fictioneer_hsl_code( $header_bg_color_light ) . \";\n }\";\n }\n\n if ( get_theme_mod( 'use_custom_light_mode', false ) ) {\n $css .= \":root[data-mode=light] {\"\n .\n implode( '', array_map( function( $prop ) {\n return \"--bg-{$prop}-free: \" . fictioneer_hsl_code( fictioneer_get_theme_color( \"light_bg_{$prop}\" ), 'free' ) . ';';\n }, ['50', '100', '200', '300', '400', '500', '600', '700', '800', '900', '950'] ) )\n .\n \"\n --card-frame-border-color: \" . fictioneer_hsl_code( fictioneer_get_theme_color( 'light_card_frame' ) ) . \";\n --primary-400: \" . fictioneer_get_theme_color( 'light_primary_400' ) . \";\n --primary-500: \" . fictioneer_get_theme_color( 'light_primary_500' ) . \";\n --primary-600: \" . fictioneer_get_theme_color( 'light_primary_600' ) . \";\n --red-400: \" . fictioneer_get_theme_color( 'light_red_400' ) . \";\n --red-500: \" . fictioneer_get_theme_color( 'light_red_500' ) . \";\n --red-600: \" . fictioneer_get_theme_color( 'light_red_600' ) . \";\n --green-400: \" . fictioneer_get_theme_color( 'light_green_400' ) . \";\n --green-500: \" . fictioneer_get_theme_color( 'light_green_500' ) . \";\n --green-600: \" . fictioneer_get_theme_color( 'light_green_600' ) . \";\n --theme-color-base: \" . fictioneer_hsl_code( fictioneer_get_theme_color( 'light_theme_color_base' ), 'values' ) . \";\n --navigation-background: \" . fictioneer_hsl_code( fictioneer_get_theme_color( 'light_navigation_background_sticky' ) ) . \";\n --bookmark-color-alpha: \" . fictioneer_get_theme_color( 'light_bookmark_color_alpha' ) . \";\n --bookmark-color-beta: \" . fictioneer_get_theme_color( 'light_bookmark_color_beta' ) . \";\n --bookmark-color-gamma: \" . fictioneer_get_theme_color( 'light_bookmark_color_gamma' ) . \";\n --bookmark-color-delta: \" . fictioneer_get_theme_color( 'light_bookmark_color_delta' ) . \";\n --bookmark-line: \" . fictioneer_get_theme_color( 'light_bookmark_line_color' ) . \";\n --ins-background: \" . fictioneer_get_theme_color( 'light_ins_background' ) . \";\n --del-background: \" . fictioneer_get_theme_color( 'light_del_background' ) . \";\"\n .\n implode( '', array_map( function( $prop ) {\n return \"--badge-{$prop}-background: \" . fictioneer_get_theme_color( \"light_badge_{$prop}_background\" ) . ';';\n }, ['generic', 'moderator', 'admin', 'author', 'supporter', 'override'] ) )\n .\n \"}\n :root[data-mode=light], :root[data-mode=light] .chapter-formatting {\"\n .\n implode( '', array_map( function( $prop ) {\n return \"--fg-{$prop}: \" . 
fictioneer_hsl_font_code( fictioneer_get_theme_color( \"light_fg_{$prop}\" ) ) . ';';\n }, ['100', '200', '300', '400', '500', '600', '700', '800', '900', '950', 'tinted', 'inverted'] ) )\n .\n \"}\";\n }\n\n // --- Header styles ---------------------------------------------------------\n\n if ( in_array( $header_style, ['top', 'split'] ) ) {\n $css .= fictioneer_get_customizer_css_snippet( 'header-style-top-split' );\n }\n\n if ( $header_style === 'wide' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'header-style-wide' );\n }\n\n if ( $header_style === 'text_center' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'header-style-text-center' );\n }\n\n if ( $header_style === 'post_content' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'header-style-post-content' );\n }\n\n // --- Page styles -----------------------------------------------------------\n\n if ( $page_style === 'polygon-mask-image-battered-ringbook' || $page_style === 'polygon-battered' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'page-style-battered' );\n }\n\n if ( $page_style === 'polygon-mask-image-battered-ringbook' || $page_style === 'mask-image-ringbook' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'page-style-ringbook' );\n }\n\n if ( $page_style === 'polygon-chamfered' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'page-style-chamfered' );\n }\n\n if ( $page_style === 'polygon-interface-a' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'page-style-interface-a' );\n }\n\n if ( $page_style === 'mask-image-wave-a' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'page-style-wave-a' );\n }\n\n if ( $page_style === 'mask-image-layered-steps-a' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'page-style-layered-steps-a' );\n }\n\n if ( $page_style === 'mask-image-layered-peaks-a' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'page-style-layered-peaks-a' );\n }\n\n if ( $page_style === 'mask-image-grunge-a' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'page-style-grunge-a' );\n }\n\n if ( $page_style === 'none' ) {\n $css .= '.main__background { display:none !important; }';\n }\n\n // --- Page shadow -----------------------------------------------------------\n\n if ( ! 
get_theme_mod( 'page_shadow', true ) ) {\n $css .= ':root.no-page-shadow {\n --minimal-page-box-shadow: none;\n --page-box-shadow: none;\n --page-drop-shadow: none;\n }';\n }\n\n // --- Card styles -----------------------------------------------------------\n\n if ( in_array( $card_style, ['unfolded', 'combined'] ) ) {\n $css .= fictioneer_get_customizer_css_snippet( 'card-style-unfolded-combined' );\n }\n\n if ( $card_style === 'combined' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'card-style-combined' );\n }\n\n // --- Card frames -----------------------------------------------------------\n\n if ( $card_frame !== 'default' ) {\n $css .= ':root:not(.minimal) .card{filter:var(--card-drop-shadow);}';\n }\n\n if ( in_array( $card_frame, ['stacked_right', 'stacked_left', 'stacked_random'] ) ) {\n $css .= fictioneer_get_customizer_css_snippet( 'card-frame-stacked' );\n\n if ( $card_frame === 'stacked_left' ) {\n $css .= '.card{--this-rotation-mod:-1;}';\n }\n }\n\n if ( $card_frame === 'border_2px' ) {\n $css .= ':root:not(.minimal) .card{--card-style-border-width: 2px;box-shadow: 0 0 0 var(--card-frame-border-thickness, 2px) var(--card-frame-border-color);}';\n }\n\n if ( $card_frame === 'border_3px' ) {\n $css .= ':root:not(.minimal) .card{--card-style-border-width: 3px;box-shadow: 0 0 0 var(--card-frame-border-thickness, 3px) var(--card-frame-border-color);}';\n }\n\n if ( $card_frame === 'chamfered' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'card-frame-chamfered' );\n }\n\n if ( $card_frame === 'battered' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'card-frame-battered' );\n }\n\n // --- Content list style ----------------------------------------------------\n\n if ( $content_list_style === 'full' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'content-list-style-full' );\n }\n\n if ( $content_list_style === 'free' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'content-list-style-free' );\n }\n\n if ( $content_list_style === 'lines' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'content-list-style-lines' );\n }\n\n // --- Footer style ----------------------------------------------------------\n\n if ( $footer_style === 'isolated' ) {\n $css .= fictioneer_get_customizer_css_snippet( 'footer-style-isolated' );\n }\n\n // --- Filters ---------------------------------------------------------------\n\n $css = apply_filters( 'fictioneer_filter_pre_build_customize_css', $css );\n\n // --- Minify ----------------------------------------------------------------\n\n $css = fictioneer_minify_css( $css );\n\n // --- Update options --------------------------------------------------------\n\n if ( $context !== 'preview' ) {\n update_option( 'fictioneer_customize_css_timestamp', time(), true );\n }\n\n // --- Save ------------------------------------------------------------------\n\n file_put_contents( $file_path, $css );\n}", "docstring": "/**\n * Builds customization stylesheet\n *\n * @since 5.11.0\n *\n * @param string|null $context Optional. 
In which context the stylesheet created,\n * for example 'preview' for the Customizer.\n */", "url": "https://github.com/Tetrakern/fictioneer/blob/75bab85d81d1d55f1a293ba9220605a91d026f59/includes/functions/_customizer.php#L550-L1057", "sha": "75bab85d81d1d55f1a293ba9220605a91d026f59"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fictioneer_save_word_count", "code": "function fictioneer_save_word_count( $post_id ) {\n // Prevent multi-fire\n if ( fictioneer_multi_save_guard( $post_id ) ) {\n return;\n }\n\n // Count\n $word_count = fictioneer_count_words( $post_id );\n\n // Save\n update_post_meta( $post_id, '_word_count', $word_count );\n}", "docstring": "// =============================================================================\n// STORE WORD COUNT AS CUSTOM FIELD\n// =============================================================================\n/**\n * Store word count of posts\n *\n * @since 3.0.0\n * @since 5.23.0 - Account for non-Latin scripts.\n * @since 5.25.0 - Split into action and utility function.\n *\n * @param int $post_id Post ID.\n */", "url": "https://github.com/Tetrakern/fictioneer/blob/75bab85d81d1d55f1a293ba9220605a91d026f59/includes/functions/_service-posts.php#L59-L70", "sha": "75bab85d81d1d55f1a293ba9220605a91d026f59"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fictioneer_maintenance_mode", "code": "function fictioneer_maintenance_mode() {\n if ( get_option( 'fictioneer_enable_maintenance_mode' ) && ! is_customize_preview() ) {\n if ( ! current_user_can( 'edit_themes' ) || ! is_user_logged_in() ) {\n $note = get_option( 'fictioneer_phrase_maintenance' );\n $note = ! empty( $note ) ? $note : __( 'Website under planned maintenance. Please check back later.', 'fictioneer' );\n\n wp_die( __( '
Under Maintenance <!-- full HTML page markup stripped from this record's code field -->
', 'fictioneer' ) . $note );\n }\n }\n}", "docstring": "// =============================================================================\n// MAINTENANCE MODE\n// =============================================================================\n/**\n * Toggle maintenance mode from settings with message\n *\n * @since 5.0.0\n * @since 5.12.0 - Exclude Customizer preview.\n */", "url": "https://github.com/Tetrakern/fictioneer/blob/75bab85d81d1d55f1a293ba9220605a91d026f59/includes/functions/_setup-wordpress.php#L14-L23", "sha": "75bab85d81d1d55f1a293ba9220605a91d026f59"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fictioneer_is_commenting_disabled", "code": "function fictioneer_is_commenting_disabled( $post_id = null ) {\n // Setup\n $post_id = $post_id ?? get_the_ID();\n\n // Return immediately if...\n if (\n get_option( 'fictioneer_disable_commenting' ) ||\n get_post_meta( $post_id, 'fictioneer_disable_commenting', true )\n ) {\n return true;\n }\n\n // Check parent story if chapter...\n if ( get_post_type( $post_id ) === 'fcn_chapter' ) {\n $story_id = fictioneer_get_chapter_story_id( $post_id );\n\n if ( $story_id ) {\n return get_post_meta( $story_id, 'fictioneer_disable_commenting', true ) == true;\n }\n }\n\n return false;\n }", "docstring": "/**\n * Check whether commenting is disabled\n *\n * Differs from comments_open() in the regard that it does not hide the whole\n * comment section but does not allow new comments to be posted.\n *\n * @since 5.0.0\n *\n * @param int|null $post_id Post ID the comments are for. Defaults to current post ID.\n *\n * @return boolean True or false.\n */", "url": "https://github.com/Tetrakern/fictioneer/blob/75bab85d81d1d55f1a293ba9220605a91d026f59/includes/functions/_utility.php#L2236-L2258", "sha": "75bab85d81d1d55f1a293ba9220605a91d026f59"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fictioneer_settings_capability_card", "code": "function fictioneer_settings_capability_card( $title, $caps, $role ) {\n // Start HTML ---> ?>\n
\n tap(function (): void {\n $this->createProcess(['git', 'rev-parse', '--is-inside-work-tree'])->mustRun();\n })\n ->tap(function () use (&$cachedDiff): void {\n $cachedDiff = $this->option('diff') ?: $this->createProcess($this->diffCommand())->mustRun()->getOutput();\n if (empty($cachedDiff)) {\n throw new RuntimeException('There are no cached files to commit. Try running `git add` to cache some files.');\n }\n })\n ->tap(function () use (&$type): void {\n $type = $this->choice(\n 'Please choice commit type',\n $types = $this->configManager->get('types'),\n array_key_first($types)\n );\n })\n ->tap(function () use ($type, $cachedDiff, &$message): void {\n $message = retry(\n $this->option('retry-times'),\n function ($attempts) use ($cachedDiff, $type): string {\n if ($attempts > 1) {\n $this->output->note('retrying...');\n }\n\n $originalMessage = $this->generatorManager\n ->driver($this->option('generator'))\n ->generate($this->promptFor($cachedDiff, $type));\n $message = $this->tryFixMessage($originalMessage);\n if (! str($message)->jsonValidate()) {\n throw new RuntimeException(sprintf(\n 'The generated commit message(%s) is an invalid JSON.',\n var_export($originalMessage, true)\n ));\n }\n\n return $message;\n },\n $this->option('retry-sleep'),\n $this->configManager->get('retry.when')\n );\n })\n ->tap(function () use (&$message): void {\n $message = Collection::json($message)\n ->map(static function ($content) {\n if (\\is_array($content)) {\n return collect($content)\n ->transform(static function (string $line): string {\n return (string) str($line)->trim(\" \\t\\n\\r\\x0B\")->start('- ');\n })\n ->implode(PHP_EOL);\n }\n\n return $content;\n })\n ->tap(function (Collection $message): void {\n $message = $message->put('', '')->sortKeysUsing(static function (string $a, string $b): int {\n $rules = ['subject', '', 'body'];\n\n return array_search($a, $rules, true) <=> array_search($b, $rules, true);\n });\n // $this->table($message->keys()->all(), [$message->all()]);\n $this->output->horizontalTable($message->keys()->all(), [$message->all()]);\n })\n ->tap(function (): void {\n if (! 
$this->confirm('Do you want to commit this message?', true)) {\n $this->output->note('regenerating...');\n $this->handle();\n }\n });\n })\n ->tap(function () use ($message): void {\n if ($this->option('dry-run')) {\n $this->info($this->hydrateMessage($message));\n\n return;\n }\n\n tap($this->createProcess($this->commitCommandFor($message)), function (Process $process): void {\n $this->shouldEdit() and $process->setTty(true);\n })->setTimeout(null)->mustRun();\n })\n ->tap(function (): void {\n $this->output->success('Successfully generated and committed message.');\n });\n }", "docstring": "/**\n * @psalm-suppress InvalidArgument\n *\n * @throws \\Exception\n */", "url": "https://github.com/guanguans/ai-commit/blob/d7b9595fede325aef270f569f29a1b33e5486d99/app/Commands/CommitCommand.php#L65-L152", "sha": "d7b9595fede325aef270f569f29a1b33e5486d99"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ClassLoader.addClassMap", "code": "public function addClassMap(array $classMap)\n {\n if ($this->classMap) {\n $this->classMap = array_merge($this->classMap, $classMap);\n } else {\n $this->classMap = $classMap;\n }\n }", "docstring": "/**\n * @param string[] $classMap Class to filename map\n * @psalm-param array $classMap\n *\n * @return void\n */", "url": "https://github.com/zhheo/HeoMusic/blob/9204f94de6a89516ac0fadd6fe2b483d01b002b4/meting-api/vendor/composer/ClassLoader.php#L165-L172", "sha": "9204f94de6a89516ac0fadd6fe2b483d01b002b4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GraphQLApiV2.flatten_blocks", "code": "public static function flatten_blocks( $blocks, $parent_id = null ) {\n\t\t$flattened_blocks = [];\n\n\t\tforeach ( $blocks as $block ) {\n\t\t\t// Gather innerBlocks from current block\n\t\t\t$inner_blocks = $block['innerBlocks'] ?? [];\n\t\t\tunset( $block['innerBlocks'] );\n\n\t\t\t// Set parent ID on current block\n\t\t\t$block['parentId'] = $parent_id;\n\n\t\t\t// Recurse into inner blocks\n\t\t\t$flattened_blocks[] = $block;\n\t\t\t$flattened_blocks = array_merge( $flattened_blocks, self::flatten_blocks( $inner_blocks, $block['id'] ) );\n\t\t}\n\n\t\treturn $flattened_blocks;\n\t}", "docstring": "/**\n\t * Flatten blocks recursively.\n\t *\n\t * @param array $blocks the inner blocks in the block.\n\t * @param string $parent_id Optional. 
ID of the parent block that $blocks belong to.\n\t *\n\t * @return array\n\t */", "url": "https://github.com/Automattic/vip-block-data-api/blob/be82f1fa726a0a392a311c207bc30a018bb6e5ed/src/graphql/graphql-api-v2.php#L112-L129", "sha": "be82f1fa726a0a392a311c207bc30a018bb6e5ed"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FolioRoutes.load", "code": "protected function load(): void\n {\n if ($this->loaded) {\n return;\n }\n\n if (File::exists($this->cachedFolioRoutesPath)) {\n $cache = File::getRequire($this->cachedFolioRoutesPath);\n\n if (isset($cache['version']) && (int) $cache['version'] === static::$version) {\n $this->routes = $cache['routes'];\n\n $this->loaded = true;\n\n return;\n }\n }\n\n foreach ($this->manager->mountPaths() as $mountPath) {\n $views = Finder::create()->in($mountPath->path)->name('*.blade.php')->files()->getIterator();\n\n foreach ($views as $view) {\n $matchedView = new MatchedView($view->getRealPath(), [], $mountPath->path);\n\n if ($name = $matchedView->name()) {\n $this->routes[$name] = [\n 'mountPath' => Project::relativePathOf($matchedView->mountPath),\n 'path' => Project::relativePathOf($matchedView->path),\n 'baseUri' => $mountPath->baseUri,\n 'domain' => $mountPath->domain,\n ];\n }\n }\n }\n\n $this->loaded = true;\n }", "docstring": "/**\n * Load the routes into memory.\n */", "url": "https://github.com/laravel/folio/blob/fc48190e08f166b0532da0adbf22f2112f01c19e/src/FolioRoutes.php#L71-L107", "sha": "fc48190e08f166b0532da0adbf22f2112f01c19e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PinService.isArrestedRequestValid", "code": "public function isArrestedRequestValid(Request $request): bool\n {\n $param = config('requirepin.param', '_uuid');\n $requirePin = RequirePin::where('user_id', Auth::guard(config('requirepin.auth_guard', 'web'))->user()->id)\n ->where('route_arrested', $request->path())\n ->where('uuid', $request->{$param})\n ->whereNull('approved_at')\n ->whereNull('cancelled_at')\n ->first();\n\n if (!isset($requirePin->id)) {\n return false;\n }\n\n return true;\n }", "docstring": "/**\n * Error Response For Pin Authentication.\n *\n * @param \\Illuminate\\Http\\Request $request\n *\n * @return bool\n */", "url": "https://github.com/ikechukwukalu/requirepin/blob/4d7e690f03d0b4374ec057b1c3f5762d671a9bbd/src/Services/PinService.php#L257-L272", "sha": "4d7e690f03d0b4374ec057b1c3f5762d671a9bbd"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QRimage.image", "code": "private static function image($frame, $pixelPerPoint = 4, $outerFrame = 4) \n {\n $h = count($frame);\n $w = strlen($frame[0]);\n \n $imgW = $w + 2*$outerFrame;\n $imgH = $h + 2*$outerFrame;\n \n $base_image =ImageCreate($imgW, $imgH);\n \n $col[0] = ImageColorAllocate($base_image,255,255,255);\n $col[1] = ImageColorAllocate($base_image,0,0,0);\n\n imagefill($base_image, 0, 0, $col[0]);\n\n for($y=0; $y<$h; $y++) {\n for($x=0; $x<$w; $x++) {\n if ($frame[$y][$x] == '1') {\n ImageSetPixel($base_image,$x+$outerFrame,$y+$outerFrame,$col[1]); \n }\n }\n }\n \n $target_image =ImageCreate($imgW * $pixelPerPoint, $imgH * $pixelPerPoint);\n ImageCopyResized($target_image, $base_image, 0, 0, 0, 0, $imgW * $pixelPerPoint, $imgH * $pixelPerPoint, $imgW, $imgH);\n ImageDestroy($base_image);\n \n return $target_image;\n }", "docstring": "//----------------------------------------------------------------------", "url": 
"https://github.com/wizwizdev/wizwizxui-timebot/blob/549a9986f6b02858584a03d90b80155d2937a15b/phpqrcode/phpqrcode.php#L977-L1005", "sha": "549a9986f6b02858584a03d90b80155d2937a15b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RequestCore.add_header", "code": "public function add_header($key, $value)\n {\n $this->request_headers[$key] = $value;\n return $this;\n }", "docstring": "/**\n * Add a custom HTTP header to the cURL request.\n *\n * @param string $key (Required) The custom HTTP header to set.\n * @param mixed $value (Required) The value to assign to the custom HTTP header.\n * @return $this A reference to the current instance.\n */", "url": "https://github.com/netcccyun/pan/blob/4f2867f186a0bcf9b710d0ee6a5049e4c4709348/includes/OSS/Http/RequestCore.php#L294-L298", "sha": "4f2867f186a0bcf9b710d0ee6a5049e4c4709348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RequestCore.set_curlopts", "code": "public function set_curlopts($curlopts)\n {\n $this->curlopts = $curlopts;\n return $this;\n }", "docstring": "/**\n * Set additional CURLOPT settings. These will merge with the default settings, and override if\n * there is a duplicate.\n *\n * @param array $curlopts (Optional) A set of key-value pairs that set `CURLOPT` options. These will merge with the existing CURLOPTs, and ones passed here will override the defaults. Keys should be the `CURLOPT_*` constants, not strings.\n * @return $this A reference to the current instance.\n */", "url": "https://github.com/netcccyun/pan/blob/4f2867f186a0bcf9b710d0ee6a5049e4c4709348/includes/OSS/Http/RequestCore.php#L369-L373", "sha": "4f2867f186a0bcf9b710d0ee6a5049e4c4709348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ServerSideEncryptionConfig.__construct", "code": "public function __construct($sseAlgorithm = null, $kmsMasterKeyID = null)\n {\n $this->sseAlgorithm = $sseAlgorithm;\n $this->kmsMasterKeyID = $kmsMasterKeyID;\n }", "docstring": "/**\n * ServerSideEncryptionConfig constructor.\n * @param null $sseAlgorithm\n * @param null $kmsMasterKeyID\n */", "url": "https://github.com/netcccyun/pan/blob/4f2867f186a0bcf9b710d0ee6a5049e4c4709348/includes/OSS/Model/ServerSideEncryptionConfig.php#L21-L25", "sha": "4f2867f186a0bcf9b710d0ee6a5049e4c4709348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ServerSideEncryptionConfig.parseFromXml", "code": "public function parseFromXml($strXml)\n {\n $xml = simplexml_load_string($strXml);\n if (!isset($xml->ApplyServerSideEncryptionByDefault)) return;\n foreach ($xml->ApplyServerSideEncryptionByDefault as $default) {\n foreach ($default as $key => $value) {\n if ($key === 'SSEAlgorithm') {\n $this->sseAlgorithm = strval($value);\n } elseif ($key === 'KMSMasterKeyID') {\n $this->kmsMasterKeyID = strval($value);\n }\n }\n break;\n }\n }", "docstring": "/**\n * Parse ServerSideEncryptionConfig from the xml.\n *\n * @param string $strXml\n * @throws OssException\n * @return null\n */", "url": "https://github.com/netcccyun/pan/blob/4f2867f186a0bcf9b710d0ee6a5049e4c4709348/includes/OSS/Model/ServerSideEncryptionConfig.php#L34-L48", "sha": "4f2867f186a0bcf9b710d0ee6a5049e4c4709348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GetCorsResult.parseDataFromResponse", "code": "protected function parseDataFromResponse()\n {\n $content = $this->rawResponse->body;\n $config = new CorsConfig();\n $config->parseFromXml($content);\n 
return $config;\n }", "docstring": "/**\n * @return CorsConfig\n */", "url": "https://github.com/netcccyun/pan/blob/4f2867f186a0bcf9b710d0ee6a5049e4c4709348/includes/OSS/Result/GetCorsResult.php#L12-L18", "sha": "4f2867f186a0bcf9b710d0ee6a5049e4c4709348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GetRefererResult.isResponseOk", "code": "protected function isResponseOk()\n {\n $status = $this->rawResponse->status;\n if ((int)(intval($status) / 100) == 2 || (int)(intval($status)) === 404) {\n return true;\n }\n return false;\n }", "docstring": "/**\n * Judged according to the return HTTP status code, [200-299] that is OK, get the bucket configuration interface,\n * 404 is also considered a valid response\n *\n * @return bool\n */", "url": "https://github.com/netcccyun/pan/blob/4f2867f186a0bcf9b710d0ee6a5049e4c4709348/includes/OSS/Result/GetRefererResult.php#L33-L40", "sha": "4f2867f186a0bcf9b710d0ee6a5049e4c4709348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AliyunGreen.buildCanonicalHeaders", "code": "private function buildCanonicalHeaders()\n\t{\n\t\t$sortMap = array();\n\t\tforeach ($this->headers as $headerKey => $headerValue) {\n\t\t\t$key = strtolower($headerKey);\n\t\t\tif (strpos($key, 'x-acs-') === 0) {\n\t\t\t\t$sortMap[$key] = $headerValue;\n\t\t\t}\n\t\t}\n\t\tksort($sortMap);\n\t\t$headerString = '';\n\t\tforeach ($sortMap as $sortMapKey => $sortMapValue) {\n\t\t\t$headerString = $headerString . $sortMapKey . ':' . $sortMapValue . self::$headerSeparator;\n\t\t}\n\t\treturn $headerString;\n\t}", "docstring": "/**\n\t * @return string\n\t */", "url": "https://github.com/netcccyun/pan/blob/4f2867f186a0bcf9b710d0ee6a5049e4c4709348/includes/lib/AliyunGreen.php#L266-L281", "sha": "4f2867f186a0bcf9b710d0ee6a5049e4c4709348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "InstalledVersions.getVersion", "code": "public static function getVersion($packageName)\n {\n foreach (self::getInstalled() as $installed) {\n if (!isset($installed['versions'][$packageName])) {\n continue;\n }\n\n if (!isset($installed['versions'][$packageName]['version'])) {\n return null;\n }\n\n return $installed['versions'][$packageName]['version'];\n }\n\n throw new \\OutOfBoundsException('Package \"' . $packageName . 
'\" is not installed');\n }", "docstring": "/**\n * @param string $packageName\n * @return string|null If the package is being replaced or provided but is not really installed, null will be returned as version, use satisfies or getVersionRanges if you need to know if a given version is present\n */", "url": "https://github.com/netcccyun/pan/blob/4f2867f186a0bcf9b710d0ee6a5049e4c4709348/includes/vendor/composer/InstalledVersions.php#L168-L183", "sha": "4f2867f186a0bcf9b710d0ee6a5049e4c4709348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "JsonLocationTest.testVisitsLocation", "code": "public function testVisitsLocation()\n {\n $location = new JsonLocation();\n $parameter = new Parameter([\n 'name' => 'val',\n 'sentAs' => 'vim',\n 'filters' => ['strtoupper']\n ]);\n $response = new Response(200, [], '{\"vim\":\"bar\"}');\n $result = new Result();\n $result = $location->before($result, $response, $parameter);\n $result = $location->visit($result, $response, $parameter);\n $this->assertEquals('BAR', $result['val']);\n }", "docstring": "/**\n * @group ResponseLocation\n */", "url": "https://github.com/netcccyun/pan/blob/4f2867f186a0bcf9b710d0ee6a5049e4c4709348/includes/vendor/guzzlehttp/guzzle-services/tests/ResponseLocation/JsonLocationTest.php#L24-L37", "sha": "4f2867f186a0bcf9b710d0ee6a5049e4c4709348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Client.request", "code": "public function request($method, $uri = '', array $options = [])\n {\n $options[RequestOptions::SYNCHRONOUS] = true;\n return $this->requestAsync($method, $uri, $options)->wait();\n }", "docstring": "/**\n * Create and send an HTTP request.\n *\n * Use an absolute path to override the base path of the client, or a\n * relative path to append to the base path of the client. The URL can\n * contain the query string as well.\n *\n * @param string $method HTTP method.\n * @param string|UriInterface $uri URI object or string.\n * @param array $options Request options to apply. See \\GuzzleHttp\\RequestOptions.\n *\n * @return ResponseInterface\n * @throws GuzzleException\n */", "url": "https://github.com/netcccyun/pan/blob/4f2867f186a0bcf9b710d0ee6a5049e4c4709348/includes/vendor/guzzlehttp/guzzle/src/Client.php#L179-L183", "sha": "4f2867f186a0bcf9b710d0ee6a5049e4c4709348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HandlerStack.remove", "code": "public function remove($remove)\n {\n $this->cached = null;\n $idx = is_callable($remove) ? 0 : 1;\n $this->stack = array_values(array_filter(\n $this->stack,\n function ($tuple) use ($idx, $remove) {\n return $tuple[$idx] !== $remove;\n }\n ));\n }", "docstring": "/**\n * Remove a middleware by instance or name from the stack.\n *\n * @param callable|string $remove Middleware to remove by instance or name.\n */", "url": "https://github.com/netcccyun/pan/blob/4f2867f186a0bcf9b710d0ee6a5049e4c4709348/includes/vendor/guzzlehttp/guzzle/src/HandlerStack.php#L178-L188", "sha": "4f2867f186a0bcf9b710d0ee6a5049e4c4709348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MultipartStream.getHeaders", "code": "private function getHeaders(array $headers)\n {\n $str = '';\n foreach ($headers as $key => $value) {\n $str .= \"{$key}: {$value}\\r\\n\";\n }\n\n return \"--{$this->boundary}\\r\\n\" . trim($str) . 
\"\\r\\n\\r\\n\";\n }", "docstring": "/**\n * Get the headers needed before transferring the content of a POST file\n */", "url": "https://github.com/netcccyun/pan/blob/4f2867f186a0bcf9b710d0ee6a5049e4c4709348/includes/vendor/guzzlehttp/psr7/src/MultipartStream.php#L55-L63", "sha": "4f2867f186a0bcf9b710d0ee6a5049e4c4709348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MyViewComponent.render", "code": "public function render()\n {\n return view('component', [\n 'foo' => 'bar',\n ]);\n }", "docstring": "/**\n * @return Closure|\\Illuminate\\Contracts\\View\\View|string\n */", "url": "https://github.com/bladestan/bladestan/blob/b65375103c977e1a3357a9a92309a8a3a44686bb/tests/Rules/Fixture/laravel-component-function.php#L15-L20", "sha": "b65375103c977e1a3357a9a92309a8a3a44686bb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RouteServiceProvider.configureRateLimiting", "code": "protected function configureRateLimiting()\n {\n RateLimiter::for('api', function (Request $request) {\n return Limit::perMinute(60)->by($request->user()?->id ?: $request->ip());\n });\n }", "docstring": "/**\n * Configure the rate limiters for the application.\n *\n * @return void\n */", "url": "https://github.com/beyondcode/writeout.ai/blob/e9a5b58fd7fcbdb9ee5182cc3bbea948c6df868d/app/Providers/RouteServiceProvider.php#L46-L51", "sha": "e9a5b58fd7fcbdb9ee5182cc3bbea948c6df868d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ProfileUpdateRequest.rules", "code": "public function rules(): array\n {\n return [\n 'name' => ['string', 'max:255'],\n 'email' => ['email', 'max:255', Rule::unique(User::class)->ignore($this->user()->id)],\n ];\n }", "docstring": "/**\n * Get the validation rules that apply to the request.\n *\n * @return array\n */", "url": "https://github.com/theokafadaris/chatwire/blob/f7b2bd526d9600abbb25179eb221f9439d98ba69/app/Http/Requests/ProfileUpdateRequest.php#L16-L22", "sha": "f7b2bd526d9600abbb25179eb221f9439d98ba69"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "App.basePath", "code": "public static function basePath($path = '')\n {\n /** @var \\Illuminate\\Foundation\\Application $instance */\n return $instance->basePath($path);\n }", "docstring": "/**\n * Get the base path of the Laravel installation.\n *\n * @param string $path\n * @return string \n * @static \n */", "url": "https://github.com/rconfig/rconfig/blob/0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7/_ide_helper.php#L137-L141", "sha": "0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "App.makeWith", "code": "public static function makeWith($abstract, $parameters = [])\n { //Method inherited from \\Illuminate\\Container\\Container \n /** @var \\Illuminate\\Foundation\\Application $instance */\n return $instance->makeWith($abstract, $parameters);\n }", "docstring": "/**\n * An alias function name for make().\n *\n * @param string|callable $abstract\n * @param array $parameters\n * @return mixed \n * @throws \\Illuminate\\Contracts\\Container\\BindingResolutionException\n * @static \n */", "url": "https://github.com/rconfig/rconfig/blob/0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7/_ide_helper.php#L1337-L1341", "sha": "0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Artisan.all", "code": "public static function all()\n { //Method inherited from 
\\Illuminate\\Foundation\\Console\\Kernel \n /** @var \\App\\Console\\Kernel $instance */\n return $instance->all();\n }", "docstring": "/**\n * Get all of the commands registered with the console.\n *\n * @return array \n * @static \n */", "url": "https://github.com/rconfig/rconfig/blob/0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7/_ide_helper.php#L1736-L1740", "sha": "0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Gate.inspect", "code": "public static function inspect($ability, $arguments = [])\n {\n /** @var \\Illuminate\\Auth\\Access\\Gate $instance */\n return $instance->inspect($ability, $arguments);\n }", "docstring": "/**\n * Inspect the user for the given ability.\n *\n * @param string $ability\n * @param array|mixed $arguments\n * @return \\Illuminate\\Auth\\Access\\Response \n * @static \n */", "url": "https://github.com/rconfig/rconfig/blob/0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7/_ide_helper.php#L7475-L7479", "sha": "0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Mail.queued", "code": "public static function queued($mailable, $callback = null)\n {\n /** @var \\Illuminate\\Support\\Testing\\Fakes\\MailFake $instance */\n return $instance->queued($mailable, $callback);\n }", "docstring": "/**\n * Get all of the queued mailables matching a truth-test callback.\n *\n * @param string|\\Closure $mailable\n * @param callable|null $callback\n * @return \\Illuminate\\Support\\Collection \n * @static \n */", "url": "https://github.com/rconfig/rconfig/blob/0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7/_ide_helper.php#L8990-L8994", "sha": "0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Request.getMimeTypes", "code": "public static function getMimeTypes($format)\n { //Method inherited from \\Symfony\\Component\\HttpFoundation\\Request \n return \\Illuminate\\Http\\Request::getMimeTypes($format);\n }", "docstring": "/**\n * Gets the mime types associated with the format.\n *\n * @return string[] \n * @static \n */", "url": "https://github.com/rconfig/rconfig/blob/0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7/_ide_helper.php#L11741-L11744", "sha": "0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "URL.action", "code": "public static function action($action, $parameters = [], $absolute = true)\n {\n /** @var \\Illuminate\\Routing\\UrlGenerator $instance */\n return $instance->action($action, $parameters, $absolute);\n }", "docstring": "/**\n * Get the URL to a controller action.\n *\n * @param string|array $action\n * @param mixed $parameters\n * @param bool $absolute\n * @return string \n * @throws \\InvalidArgumentException\n * @static \n */", "url": "https://github.com/rconfig/rconfig/blob/0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7/_ide_helper.php#L16289-L16293", "sha": "0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CronSchedule.natlangElementYear", "code": "final private function natlangElementYear($elem)\n {\n if (! $elem['hasInterval']) {\n return $elem['number1'];\n }\n\n $txt = $this->natlangApply('elemYear: every_consecutive_year'.($elem['interval'] == 1 ? 
'' : '_plural'), $elem['interval']);\n if (($elem['number1'] != $this->_cronMonths['rangeMin']) || ($elem['number2'] != $this->_cronMonths['rangeMax'])) {\n $txt .= ' ('.$this->natlangApply('elemYear: from_X_through_Y', $elem['number1'], $elem['number2']).')';\n }\n\n return $txt;\n }", "docstring": "//\n// Function: natlangElementYear\n//\n// Description: Converts an entry from the year specification to natural language.\n//", "url": "https://github.com/rconfig/rconfig/blob/0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7/app/CustomClasses/CronSchedule.php#L1196-L1208", "sha": "0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AddBatchUuidColumnToActivityLogTable.up", "code": "public function up()\n {\n Schema::table('activity_log', function (Blueprint $table) {\n $table->uuid('batch_uuid')->nullable()->after('properties');\n });\n Schema::table('activity_log_archives', function (Blueprint $table) {\n $table->uuid('batch_uuid')->nullable()->after('properties');\n });\n }", "docstring": "/**\n * Run the migrations.\n *\n * @return void\n */", "url": "https://github.com/rconfig/rconfig/blob/0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7/database/migrations/2022_04_13_090632_add_batch_uuid_column_to_activity_log_table.php#L14-L22", "sha": "0263d71b5ee5b8e3c3e3febbbde7a79df0f7a1c7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Snapshots.create", "code": "public function create(array $queryParams = []): Response\n {\n return $this->client->execute(\n $this->createRequest('POST', '/snapshots' . $this->queryBuild($queryParams))\n );\n }", "docstring": "/**\n * # Create storage snapshot\n * Create new snapshot of the whole storage\n *\n * @throws InvalidArgumentException\n */", "url": "https://github.com/hkulekci/qdrant-php/blob/9be8fa3c52514c35f3e6c7db1c34f43335891fa2/src/Endpoints/Snapshots.php#L34-L39", "sha": "9be8fa3c52514c35f3e6c7db1c34f43335891fa2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GoodsService.detail", "code": "public function detail(int $id)\n {\n $goods = Goods::query()\n ->with(['coupon','goods_sub'])\n ->withCount(['carmis' => function($query) {\n $query->where('status', Carmis::STATUS_UNSOLD);\n }])->where('id', $id)->first();\n return $goods;\n }", "docstring": "/**\n * 商品详情\n *\n * @param int $id 商品id\n * @return \\Illuminate\\Database\\Eloquent\\Builder|\\Illuminate\\Database\\Eloquent\\Model|object|null\n *\n * @author assimon\n * @copyright assimon\n * @link http://utf8.hk/\n */", "url": "https://github.com/hiouttime/dujiaoka/blob/51538b6a7e74a67a4f92a242b1e3c6799b64f5ca/app/Service/GoodsService.php#L64-L72", "sha": "51538b6a7e74a67a4f92a242b1e3c6799b64f5ca"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FilamentJobsMonitorPlugin.getId", "code": "public function getId(): string\n {\n return 'filament-jobs-monitor';\n }", "docstring": "/**\n * Get the plugin identifier.\n */", "url": "https://github.com/croustibat/filament-jobs-monitor/blob/6543ae2d5767de9a45f56d1396042dc3f6d08793/src/FilamentJobsMonitorPlugin.php#L72-L75", "sha": "6543ae2d5767de9a45f56d1396042dc3f6d08793"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SubscriptionFactory.expired", "code": "public function expired(): self\n {\n return $this->state([\n 'status' => Subscription::STATUS_EXPIRED,\n ]);\n }", "docstring": "/**\n * Mark the subscription as expired\n */", "url": 
"https://github.com/lmsqueezy/laravel/blob/689941de4960cccf53ebcd0e8486b016d42ddfac/database/factories/SubscriptionFactory.php#L124-L129", "sha": "689941de4960cccf53ebcd0e8486b016d42ddfac"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TextLoader.load", "code": "public function load(): array\n {\n $text = file_get_contents($this->filePath->getRealPath());\n $metadata = ['source' => $this->filePath->getRealPath()];\n return [new Document(pageContent:$text, metadata: $metadata)];\n }", "docstring": "/**\n * Load from file path.\n *\n * @return Document[]\n */", "url": "https://github.com/kambo-1st/langchain-php/blob/48f9db4b841a72eb2a42377758477a201c64a0b4/src/DocumentLoaders/TextLoader.php#L37-L42", "sha": "48f9db4b841a72eb2a42377758477a201c64a0b4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Comment.newFactory", "code": "protected static function newFactory(): CommentFactory\n {\n return CommentFactory::new();\n }", "docstring": "/**\n * @return CommentFactory\n */", "url": "https://github.com/usamamuneerchaudhary/commentify/blob/c70cfe8cd4cc1cd759e61b5e83726d8e951af2c7/src/Models/Comment.php#L78-L81", "sha": "c70cfe8cd4cc1cd759e61b5e83726d8e951af2c7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "edit_category", "code": "function edit_category(){\n if(empty($_POST['name'])){\n msg(-1,'分类名称不能为空');\n }elseif(!preg_match('/^(fa fa-|layui-icon layui-icon-)([A-Za-z0-9]|-)+$/',$_POST['font_icon'])){\n $_POST['font_icon'] = 'fa fa-star-o';\n }\n //父分类不能是自己\n if($_POST['id'] == $_POST['fid']){\n msg(-1,'父分类不能是自己');\n }\n //查CID是否存在\n if(!get_db('user_categorys','cid',['uid'=>UID ,\"cid\" => intval($_POST['id'])])){\n msg(-1,'分类不存在');\n }\n //分类名查重(排除自身)\n if(get_db('user_categorys','cid',['uid'=>UID,'cid[!]'=>intval($_POST['id']),\"name\" => $_POST['name']])){\n msg(-1,'分类名称已存在');\n }\n //父分类不能是二级分类\n if(intval($_POST['fid']) !=0 && get_db('user_categorys','fid',['uid'=>UID ,\"cid\" => intval($_POST['fid']) ]) !=0 ){\n msg(-1,'父分类不能是二级分类');\n }\n //分类下存在子分类,禁止修改父分类\n if( $_POST['fid']!=0 && count_db('user_categorys',['uid'=>UID,'fid'=>$_POST['id']])>0){\n msg(-1,'该分类下已存在子分类!');\n }\n //查父分类是否存在\n if( $_POST['fid'] !=0 && !get_db('user_categorys','cid',['uid'=>UID ,\"cid\" => intval($_POST['fid'])])){\n msg(-1,'父分类不存在');\n }\n //长度检测\n $length_limit = unserialize(get_db(\"global_config\",\"v\",[\"k\"=>\"length_limit\"]));\n if($length_limit['c_name'] > 0 && strlen($_POST['name']) > $length_limit['c_name'] ){\n msg(-1,'名称长度不能大于'.$length_limit['c_name'].'个字节');\n }\n if($length_limit['c_desc'] > 0 && strlen($_POST['description']) > $length_limit['c_desc'] ){\n msg(-1,'名称长度不能大于'.$length_limit['c_desc'].'个字节');\n }\n \n //更新数据\n $data = [\n 'fid'=>$_POST['fid'],\n 'property'=>intval($_POST['property']??'0'),\n 'name'=>$_POST['name'],\n 'up_time'=>time(),\n 'description'=>$_POST['description']??'',\n 'font_icon'=>$_POST['font_icon'],\n ];\n if(!isset($_POST['fid'])){ //为空时不修改父id,避免二级变一级\n unset($data['fid']);\n }\n if(!isset($_POST['font_icon'])){\n unset($data['font_icon']);\n }\n update_db('user_categorys',$data,['uid'=>UID ,\"cid\"=>intval($_POST['id'])],[0,'successful']);\n}", "docstring": "//编辑分类", "url": "https://github.com/tznb1/TwoNav/blob/f87af906d9761178f63ae79ae762ac9a7090dca7/system/api_compatible.php#L223-L278", "sha": "f87af906d9761178f63ae79ae762ac9a7090dca7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "update_db", "code": "function 
update_db($table,$data,$where,$rp = []){\n global $db;\n try {\n $db->update($table,$data,$where);\n if(empty($rp)){\n return true;\n }else{\n msg($rp[0],$rp[1]);\n }\n }catch (Exception $e) {\n if(Debug){\n msgA(['code'=>-1,'msg'=>'更新数据失败','Message'=>$e->getMessage(),'debug'=>debug_backtrace()]);\n }else{\n Amsg(-1,'更新数据失败');\n }\n }\n}", "docstring": "//更新 $rp = [1,'成功'];", "url": "https://github.com/tznb1/TwoNav/blob/f87af906d9761178f63ae79ae762ac9a7090dca7/system/public.php#L101-L117", "sha": "f87af906d9761178f63ae79ae762ac9a7090dca7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "is_apply", "code": "function is_apply(){\n $apply_user = unserialize( get_db(\"user_config\", \"v\", [\"k\" => \"apply\",\"uid\"=>UID]));\n return ($GLOBALS['global_config']['apply'] == 1 && $apply_user['apply'] > 0);\n}", "docstring": "//是否启用收录", "url": "https://github.com/tznb1/TwoNav/blob/f87af906d9761178f63ae79ae762ac9a7090dca7/system/templates.php#L145-L148", "sha": "f87af906d9761178f63ae79ae762ac9a7090dca7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DisplayController.getTableData", "code": "public function getTableData(string $tableName): JsonResponse\n {\n return response()->json(array(\n \"data\" => $this->tableRules($tableName)\n ));\n }", "docstring": "/**\n * Get table data\n * @param string $tableName\n * @return JsonResponse\n */", "url": "https://github.com/vcian/laravel-db-auditor/blob/50fc3e559ac32889ef3d610dd5938a7f09dd0e6f/src/Controllers/DisplayController.php#L57-L62", "sha": "50fc3e559ac32889ef3d610dd5938a7f09dd0e6f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HTMLPurifier_LanguageFactory.getFallbackFor", "code": "public function getFallbackFor($code)\n {\n $this->loadLanguage($code);\n return $this->cache[$code]['fallback'];\n }", "docstring": "/**\n * Returns the fallback language for language\n * @note Loads the original language into cache\n * @param string $code language code\n * @return string|bool\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/classes/htmlpurifier/HTMLPurifier/LanguageFactory.php#L136-L140", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HTMLPurifier_Lexer.CDATACallback", "code": "protected static function CDATACallback($matches)\n {\n // not exactly sure why the character set is needed, but whatever\n return htmlspecialchars($matches[1], ENT_COMPAT, 'UTF-8');\n }", "docstring": "/**\n * Callback function for escapeCDATA() that does the work.\n *\n * @warning Though this is public in order to let the callback happen,\n * calling it directly is not recommended.\n * @param array $matches PCRE matches array, with index 0 the entire match\n * and 1 the inside of the CDATA section.\n * @return string Escaped internals of the CDATA section.\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/classes/htmlpurifier/HTMLPurifier/Lexer.php#L290-L294", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HTMLPurifier_ConfigSchema_InterchangeBuilder._findUnused", "code": "protected function _findUnused($hash)\n {\n $accessed = $hash->getAccessed();\n foreach ($hash as $k => $v) {\n if (!isset($accessed[$k])) {\n trigger_error(\"String hash key '$k' not used by builder\", E_USER_NOTICE);\n }\n }\n }", 
"docstring": "/**\n * Triggers errors for any unused keys passed in the hash; such keys\n * may indicate typos, missing values, etc.\n * @param HTMLPurifier_StringHash $hash Hash to check.\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/classes/htmlpurifier/HTMLPurifier/ConfigSchema/InterchangeBuilder.php#L215-L223", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HTMLPurifier_Printer_HTMLDefinition.heavyHeader", "code": "protected function heavyHeader($text, $num = 1)\n {\n $ret = '';\n $ret .= $this->start('tr');\n $ret .= $this->element('th', $text, array('colspan' => $num, 'class' => 'heavy'));\n $ret .= $this->end('tr');\n return $ret;\n }", "docstring": "/**\n * Creates a heavy header row\n * @param string $text\n * @param int $num\n * @return string\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/classes/htmlpurifier/HTMLPurifier/Printer/HTMLDefinition.php#L314-L321", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ADODB_Active_Record.UseDefaultValues", "code": "static function UseDefaultValues($bool=null)\n\t{\n\tglobal $ADODB_ACTIVE_DEFVALS;\n\t\tif (isset($bool)) {\n\t\t\t$ADODB_ACTIVE_DEFVALS = $bool;\n\t\t}\n\t\treturn $ADODB_ACTIVE_DEFVALS;\n\t}", "docstring": "// CFR: class name when in a relationship", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/adodb/adodb-active-recordx.inc.php#L106-L113", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ADODB_Cache_File.readcache", "code": "function &readcache($filename, &$err, $secs2cache, $rsClass) {\n\t\t\t$rs = csv2rs($filename,$err,$secs2cache,$rsClass);\n\t\t\treturn $rs;\n\t\t}", "docstring": "// load serialised recordset and unserialise it", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/adodb/adodb.inc.php#L337-L340", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ADOConnection.GetActiveRecordsClass", "code": "function GetActiveRecordsClass(\n\t\t\t$class, $table,$whereOrderBy=false,$bindarr=false, $primkeyArr=false,\n\t\t\t$extra=array(),\n\t\t\t$relations=array())\n\t{\n\t\tglobal $_ADODB_ACTIVE_DBS;\n\t\t## reduce overhead of adodb.inc.php -- moved to adodb-active-record.inc.php\n\t\t## if adodb-active-recordx is loaded -- should be no issue as they will probably use Find()\n\t\tif (!isset($_ADODB_ACTIVE_DBS)) {\n\t\t\tinclude_once(ADODB_DIR.'/adodb-active-record.inc.php');\n\t\t}\n\t\treturn adodb_GetActiveRecordsClass($this, $class, $table, $whereOrderBy, $bindarr, $primkeyArr, $extra, $relations);\n\t}", "docstring": "/**\n\t * GetActiveRecordsClass Performs an 'ALL' query\n\t *\n\t * @param mixed $class This string represents the class of the current active record\n\t * @param mixed $table Table used by the active record object\n\t * @param mixed $whereOrderBy Where, order, by clauses\n\t * @param mixed $bindarr\n\t * @param mixed $primkeyArr\n\t * @param array $extra Query extras: limit, offset...\n\t * @param mixed $relations Associative array: table's foreign name, \"hasMany\", \"belongsTo\"\n\t * @access public\n\t * @return void\n\t */", "url": 
"https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/adodb/adodb.inc.php#L2373-L2385", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ADODB_csv.SelectLimit", "code": "function SelectLimit($sql, $nrows = -1, $offset = -1, $inputarr = false, $secs2cache = 0)\n\t{\n\t\tglobal $ADODB_FETCH_MODE;\n\n\t\t$nrows = (int) $nrows;\n\t\t$offset = (int) $offset;\n\t\t$url = $this->_url.'?sql='.urlencode($sql).\"&nrows=$nrows&fetch=\".\n\t\t\t(($this->fetchMode !== false)?$this->fetchMode : $ADODB_FETCH_MODE).\n\t\t\t\"&offset=$offset\";\n\t\t$err = false;\n\t\t$rs = csv2rs($url,$err,false);\n\n\t\tif ($this->debug) print \"$url
<br>$err<br>
\";\n\n\t\t$at = strpos($err,'::::');\n\t\tif ($at === false) {\n\t\t\t$this->_errorMsg = $err;\n\t\t\t$this->_errorNo = (integer)$err;\n\t\t} else {\n\t\t\t$this->_errorMsg = substr($err,$at+4,1024);\n\t\t\t$this->_errorNo = -9999;\n\t\t}\n\t\tif ($this->_errorNo)\n\t\t\tif ($fn = $this->raiseErrorFn) {\n\t\t\t\t$fn($this->databaseType,'EXECUTE',$this->ErrorNo(),$this->ErrorMsg(),$sql,'');\n\t\t\t}\n\n\t\tif (is_object($rs)) {\n\n\t\t\t$rs->databaseType='csv';\n\t\t\t$rs->fetchMode = ($this->fetchMode !== false) ? $this->fetchMode : $ADODB_FETCH_MODE;\n\t\t\t$rs->connection = $this;\n\t\t}\n\t\treturn $rs;\n\t}", "docstring": "// parameters use PostgreSQL convention, not MySQL", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/adodb/drivers/adodb-csv.inc.php#L84-L118", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ADODB_odbtp.Parameter", "code": "function Parameter(&$stmt, &$var, $name, $isOutput=false, $maxLen=0, $type=0)\n\t{\n\t\tif ( $this->odbc_driver == ODB_DRIVER_JET ) {\n\t\t\t$name = '['.$name.']';\n\t\t\tif( !$type && $this->_useUnicodeSQL\n\t\t\t\t&& @odbtp_param_bindtype($stmt[1], $name) == ODB_CHAR )\n\t\t\t{\n\t\t\t\t$type = ODB_WCHAR;\n\t\t\t}\n\t\t}\n\t\telse {\n\t\t\t$name = '@'.$name;\n\t\t}\n\t\treturn @odbtp_attach_param($stmt[1], $name, $var, $type, $maxLen);\n\t}", "docstring": "/*\n\tUsage:\n\t\t$stmt = $db->PrepareSP('SP_RUNSOMETHING'); -- takes 2 params, @myid and @group\n\n\t\t# note that the parameter does not have @ in front!\n\t\t$db->Parameter($stmt,$id,'myid');\n\t\t$db->Parameter($stmt,$group,'group',false,64);\n\t\t$db->Parameter($stmt,$group,'photo',false,100000,ODB_BINARY);\n\t\t$db->Execute($stmt);\n\n\t\t@param $stmt Statement returned by Prepare() or PrepareSP().\n\t\t@param $var PHP variable to bind to. Can set to null (for isNull support).\n\t\t@param $name Name of stored procedure variable name to bind to.\n\t\t@param [$isOutput] Indicates direction of parameter 0/false=IN 1=OUT 2= IN/OUT. This is ignored in odbtp.\n\t\t@param [$maxLen] Holds an maximum length of the variable.\n\t\t@param [$type] The data type of $var. 
Legal values depend on driver.\n\n\t\tSee odbtp_attach_param documentation at http://odbtp.sourceforge.net.\n\t*/", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/adodb/drivers/adodb-odbtp.inc.php#L511-L525", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PHPMailer.encodeHeader", "code": "public function encodeHeader($str, $position = 'text')\n {\n $matchcount = 0;\n switch (strtolower($position)) {\n case 'phrase':\n if (!preg_match('/[\\200-\\377]/', $str)) {\n // Can't use addslashes as we don't know the value of magic_quotes_sybase\n $encoded = addcslashes($str, \"\\0..\\37\\177\\\\\\\"\");\n if (($str == $encoded) and !preg_match('/[^A-Za-z0-9!#$%&\\'*+\\/=?^_`{|}~ -]/', $str)) {\n return $encoded;\n }\n\n return \"\\\"$encoded\\\"\";\n }\n $matchcount = preg_match_all('/[^\\040\\041\\043-\\133\\135-\\176]/', $str, $matches);\n break;\n /* @noinspection PhpMissingBreakStatementInspection */\n case 'comment':\n $matchcount = preg_match_all('/[()\"]/', $str, $matches);\n //fallthrough\n case 'text':\n default:\n $matchcount += preg_match_all('/[\\000-\\010\\013\\014\\016-\\037\\177-\\377]/', $str, $matches);\n break;\n }\n\n //RFCs specify a maximum line length of 78 chars, however mail() will sometimes\n //corrupt messages with headers longer than 65 chars. See #818\n $lengthsub = 'mail' == $this->Mailer ? 13 : 0;\n $maxlen = static::STD_LINE_LENGTH - $lengthsub;\n // Try to select the encoding which should produce the shortest output\n if ($matchcount > strlen($str) / 3) {\n // More than a third of the content will need encoding, so B encoding will be most efficient\n $encoding = 'B';\n //This calculation is:\n // max line length\n // - shorten to avoid mail() corruption\n // - Q/B encoding char overhead (\"` =??[QB]??=`\")\n // - charset name length\n $maxlen = static::STD_LINE_LENGTH - $lengthsub - 8 - strlen($this->CharSet);\n if ($this->hasMultiBytes($str)) {\n // Use a custom function which correctly encodes and wraps long\n // multibyte strings without breaking lines within a character\n $encoded = $this->base64EncodeWrapMB($str, \"\\n\");\n } else {\n $encoded = base64_encode($str);\n $maxlen -= $maxlen % 4;\n $encoded = trim(chunk_split($encoded, $maxlen, \"\\n\"));\n }\n $encoded = preg_replace('/^(.*)$/m', ' =?' . $this->CharSet . \"?$encoding?\\\\1?=\", $encoded);\n } elseif ($matchcount > 0) {\n //1 or more chars need encoding, use Q-encode\n $encoding = 'Q';\n //Recalc max line length for Q encoding - see comments on B encode\n $maxlen = static::STD_LINE_LENGTH - $lengthsub - 8 - strlen($this->CharSet);\n $encoded = $this->encodeQ($str, $position);\n $encoded = $this->wrapText($encoded, $maxlen, true);\n $encoded = str_replace('=' . static::$LE, \"\\n\", trim($encoded));\n $encoded = preg_replace('/^(.*)$/m', ' =?' . $this->CharSet . 
\"?$encoding?\\\\1?=\", $encoded);\n } elseif (strlen($str) > $maxlen) {\n //No chars need encoding, but line is too long, so fold it\n $encoded = trim($this->wrapText($str, $maxlen, false));\n if ($str == $encoded) {\n //Wrapping nicely didn't work, wrap hard instead\n $encoded = trim(chunk_split($str, static::STD_LINE_LENGTH, static::$LE));\n }\n $encoded = str_replace(static::$LE, \"\\n\", trim($encoded));\n $encoded = preg_replace('/^(.*)$/m', ' \\\\1', $encoded);\n } else {\n //No reformatting needed\n return $str;\n }\n\n return trim(static::normalizeBreaks($encoded));\n }", "docstring": "/**\n * Encode a header value (not including its label) optimally.\n * Picks shortest of Q, B, or none. Result includes folding if needed.\n * See RFC822 definitions for phrase, comment and text positions.\n *\n * @param string $str The header value to encode\n * @param string $position What context the string will be used in\n *\n * @return string\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/phpmailer/PHPMailer.php#L3043-L3117", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PHPMailer.hasMultiBytes", "code": "public function hasMultiBytes($str)\n {\n if (function_exists('mb_strlen')) {\n return strlen($str) > mb_strlen($str, $this->CharSet);\n }\n\n // Assume no multibytes (we can't handle without mbstring functions anyway)\n return false;\n }", "docstring": "/**\n * Check if a string contains multi-byte characters.\n *\n * @param string $str multi-byte text to wrap encode\n *\n * @return bool\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/phpmailer/PHPMailer.php#L3126-L3134", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PHPMailer.encodeQP", "code": "public function encodeQP($string)\n {\n return static::normalizeBreaks(quoted_printable_encode($string));\n }", "docstring": "/**\n * Encode a string in quoted-printable format.\n * According to RFC2045 section 6.7.\n *\n * @param string $string The text to encode\n *\n * @return string\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/phpmailer/PHPMailer.php#L3200-L3203", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PHPMailer.lang", "code": "protected function lang($key)\n {\n if (count($this->language) < 1) {\n $this->setLanguage('en'); // set the default language\n }\n\n if (array_key_exists($key, $this->language)) {\n if ('smtp_connect_failed' == $key) {\n //Include a link to troubleshooting docs on SMTP connection failure\n //this is by far the biggest cause of support questions\n //but it's usually not PHPMailer's fault.\n return $this->language[$key] . 
' https://github.com/PHPMailer/PHPMailer/wiki/Troubleshooting';\n }\n\n return $this->language[$key];\n }\n\n //Return the key as a fallback\n return $key;\n }", "docstring": "/**\n * Get an error message in the current language.\n *\n * @param string $key\n *\n * @return string\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/phpmailer/PHPMailer.php#L3646-L3665", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Smarty.setEscapeHtml", "code": "public function setEscapeHtml($escape_html)\n {\n $this->escape_html = $escape_html;\n }", "docstring": "/**\n * @param boolean $escape_html\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/smarty/libs/Smarty.class.php#L1120-L1123", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "smarty_modifiercompiler_count_words", "code": "function smarty_modifiercompiler_count_words($params)\n{\n if (Smarty::$_MBSTRING) {\n // return 'preg_match_all(\\'#[\\w\\pL]+#' . Smarty::$_UTF8_MODIFIER . '\\', ' . $params[0] . ', $tmp)';\n // expression taken from http://de.php.net/manual/en/function.str-word-count.php#85592\n return 'preg_match_all(\\'/\\p{L}[\\p{L}\\p{Mn}\\p{Pd}\\\\\\'\\x{2019}]*/' . Smarty::$_UTF8_MODIFIER . '\\', ' .\n $params[ 0 ] . ', $tmp)';\n }\n // no MBString fallback\n return 'str_word_count(' . $params[ 0 ] . ')';\n}", "docstring": "/**\n * Smarty plugin\n *\n * @package Smarty\n * @subpackage PluginsModifierCompiler\n */\n/**\n * Smarty count_words modifier plugin\n * Type: modifier\n * Name: count_words\n * Purpose: count the number of words in a text\n *\n * @link http://www.smarty.net/manual/en/language.modifier.count.words.php count_words (Smarty online manual)\n * @author Uwe Tews\n *\n * @param array $params parameters\n *\n * @return string with compiled code\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/smarty/libs/plugins/modifiercompiler.count_words.php#L22-L32", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "smarty_modifiercompiler_from_charset", "code": "function smarty_modifiercompiler_from_charset($params)\n{\n if (!Smarty::$_MBSTRING) {\n // FIXME: (rodneyrehm) shouldn't this throw an error?\n return $params[ 0 ];\n }\n\n if (!isset($params[ 1 ])) {\n $params[ 1 ] = '\"ISO-8859-1\"';\n }\n\n return 'mb_convert_encoding(' . $params[ 0 ] . ', \"' . addslashes(Smarty::$_CHARSET) . '\", ' . $params[ 1 ] . 
')';\n}", "docstring": "/**\n * Smarty plugin\n *\n * @package Smarty\n * @subpackage PluginsModifierCompiler\n */\n/**\n * Smarty from_charset modifier plugin\n * Type: modifier\n * Name: from_charset\n * Purpose: convert character encoding from $charset to internal encoding\n *\n * @author Rodney Rehm\n *\n * @param array $params parameters\n *\n * @return string with compiled code\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/smarty/libs/plugins/modifiercompiler.from_charset.php#L21-L33", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Smarty_Internal_Compile_Function.compile", "code": "public function compile($args, Smarty_Internal_TemplateCompilerBase $compiler)\n {\n // check and get attributes\n $_attr = $this->getAttributes($compiler, $args);\n\n if ($_attr[ 'nocache' ] === true) {\n $compiler->trigger_template_error('nocache option not allowed', null, true);\n }\n unset($_attr[ 'nocache' ]);\n $_name = trim($_attr[ 'name' ], '\\'\"');\n $compiler->parent_compiler->tpl_function[ $_name ] = array();\n $save = array($_attr, $compiler->parser->current_buffer, $compiler->template->compiled->has_nocache_code,\n $compiler->template->caching);\n $this->openTag($compiler, 'function', $save);\n // Init temporary context\n $compiler->parser->current_buffer = new Smarty_Internal_ParseTree_Template();\n $compiler->template->compiled->has_nocache_code = false;\n $compiler->saveRequiredPlugins(true);\n return true;\n }", "docstring": "/**\n * Compiles code for the {function} tag\n *\n * @param array $args array with attributes from parser\n * @param \\Smarty_Internal_TemplateCompilerBase $compiler compiler object\n *\n * @return bool true\n * @throws \\SmartyCompilerException\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/smarty/libs/sysplugins/smarty_internal_compile_function.php#L53-L72", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Smarty_Internal_Debug.end_cache", "code": "public function end_cache(Smarty_Internal_Template $template)\n {\n $key = $this->get_key($template);\n $this->template_data[ $this->index ][ $key ][ 'cache_time' ] +=\n microtime(true) - $this->template_data[ $this->index ][ $key ][ 'start_time' ];\n }", "docstring": "/**\n * End logging of cache time\n *\n * @param \\Smarty_Internal_Template $template cached template\n */", "url": "https://github.com/avscms/avscms/blob/3908822ae404c0761df6e9c58ece543baedd4e11/include/smarty/libs/sysplugins/smarty_internal_debug.php#L166-L171", "sha": "3908822ae404c0761df6e9c58ece543baedd4e11"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Handler.register", "code": "public function register(): void\n {\n $this->reportable(function (Throwable $e) {\n //\n });\n }", "docstring": "/**\n * Register the exception handling callbacks for the application.\n */", "url": "https://github.com/alnutile/larachain/blob/0b6e2d3c4b4006dc1c18bb2114f906305ce0da3b/app/Exceptions/Handler.php#L42-L47", "sha": "0b6e2d3c4b4006dc1c18bb2114f906305ce0da3b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ProjectPolicy.viewAny", "code": "public function viewAny(User $user): bool\n {\n return false;\n }", "docstring": "/**\n * Determine whether the user can view any models.\n */", "url": 
"https://github.com/alnutile/larachain/blob/0b6e2d3c4b4006dc1c18bb2114f906305ce0da3b/app/Policies/ProjectPolicy.php#L14-L17", "sha": "0b6e2d3c4b4006dc1c18bb2114f906305ce0da3b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoESIMPlan.setStatus", "code": "public function setStatus($status)\n {\n if (is_null($status)) {\n throw new \\InvalidArgumentException('non-nullable status cannot be null');\n }\n $allowedValues = $this->getStatusAllowableValues();\n if (!in_array($status, $allowedValues, true)) {\n throw new \\InvalidArgumentException(\n sprintf(\n \"Invalid value '%s' for 'status', must be one of '%s'\",\n $status,\n implode(\"', '\", $allowedValues)\n )\n );\n }\n $this->container['status'] = $status;\n\n return $this;\n }", "docstring": "/**\n * Sets status\n *\n * @param string $status status\n *\n * @return self\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoESIMPlan.php#L562-L580", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoESimPurchase.setShortNotes", "code": "public function setShortNotes($short_notes)\n {\n if (is_null($short_notes)) {\n throw new \\InvalidArgumentException('non-nullable short_notes cannot be null');\n }\n $this->container['short_notes'] = $short_notes;\n\n return $this;\n }", "docstring": "/**\n * Sets short_notes\n *\n * @param string $short_notes short_notes\n *\n * @return self\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoESimPurchase.php#L1143-L1151", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoPrice.openAPIFormats", "code": "public static function openAPIFormats()\n {\n return self::$openAPIFormats;\n }", "docstring": "/**\n * Array of property to format mappings. Used for (de)serialization\n *\n * @return array\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoPrice.php#L99-L102", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoTopupOffer.isNullable", "code": "public static function isNullable(string $property): bool\n {\n return self::openAPINullables()[$property] ?? 
false;\n }", "docstring": "/**\n * Checks if a property is nullable\n *\n * @param string $property\n * @return bool\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoTopupOffer.php#L185-L188", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoTopupPurchase.getDataGb", "code": "public function getDataGb()\n {\n return $this->container['data_gb'];\n }", "docstring": "/**\n * Gets data_gb\n *\n * @return float\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoTopupPurchase.php#L770-L773", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoTopupPurchaseMakeInput.setIfExists", "code": "private function setIfExists(string $variableName, array $fields, $defaultValue): void\n {\n if (self::isNullable($variableName) && array_key_exists($variableName, $fields) && is_null($fields[$variableName])) {\n $this->openAPINullablesSetToNull[] = $variableName;\n }\n\n $this->container[$variableName] = $fields[$variableName] ?? $defaultValue;\n }", "docstring": "/**\n * Sets $this->container[$variableName] to the given data or to the given default Value; if $variableName\n * is nullable and its value is set to null in the $fields array, then mark it as \"set to null\" in the\n * $this->openAPINullablesSetToNull array\n *\n * @param string $variableName\n * @param array $fields\n * @param mixed $defaultValue\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoTopupPurchaseMakeInput.php#L260-L267", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoTopupPurchaseResponse.getModelName", "code": "public function getModelName()\n {\n return self::$openAPIModelName;\n }", "docstring": "/**\n * The original name of the model.\n *\n * @return string\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoTopupPurchaseResponse.php#L205-L208", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoTopupPurchaseResponse.listInvalidProperties", "code": "public function listInvalidProperties()\n {\n $invalidProperties = [];\n\n if ($this->container['status'] === null) {\n $invalidProperties[] = \"'status' can't be null\";\n }\n if ($this->container['transaction_id'] === null) {\n $invalidProperties[] = \"'transaction_id' can't be null\";\n }\n return $invalidProperties;\n }", "docstring": "/**\n * Show all the invalid properties with reasons.\n *\n * @return array invalid properties with reasons\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoTopupPurchaseResponse.php#L253-L264", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoTransaction.setCurrency", "code": "public function setCurrency($currency)\n {\n if (is_null($currency)) {\n throw new \\InvalidArgumentException('non-nullable currency cannot be null');\n }\n $this->container['currency'] = $currency;\n\n return $this;\n }", "docstring": "/**\n * Sets currency\n *\n * @param string 
$currency currency\n *\n * @return self\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoTransaction.php#L426-L434", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoTransactionsResponse.setIfExists", "code": "private function setIfExists(string $variableName, array $fields, $defaultValue): void\n {\n if (self::isNullable($variableName) && array_key_exists($variableName, $fields) && is_null($fields[$variableName])) {\n $this->openAPINullablesSetToNull[] = $variableName;\n }\n\n $this->container[$variableName] = $fields[$variableName] ?? $defaultValue;\n }", "docstring": "/**\n * Sets $this->container[$variableName] to the given data or to the given default Value; if $variableName\n * is nullable and its value is set to null in the $fields array, then mark it as \"set to null\" in the\n * $this->openAPINullablesSetToNull array\n *\n * @param string $variableName\n * @param array $fields\n * @param mixed $defaultValue\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoTransactionsResponse.php#L253-L260", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoTransactionsResponse.__toString", "code": "public function __toString()\n {\n return json_encode(\n ObjectSerializer::sanitizeForSerialization($this),\n JSON_PRETTY_PRINT\n );\n }", "docstring": "/**\n * Gets the string presentation of the object\n *\n * @return string\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoTransactionsResponse.php#L477-L483", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoVoucherField.setIfExists", "code": "private function setIfExists(string $variableName, array $fields, $defaultValue): void\n {\n if (self::isNullable($variableName) && array_key_exists($variableName, $fields) && is_null($fields[$variableName])) {\n $this->openAPINullablesSetToNull[] = $variableName;\n }\n\n $this->container[$variableName] = $fields[$variableName] ?? $defaultValue;\n }", "docstring": "/**\n * Sets $this->container[$variableName] to the given data or to the given default Value; if $variableName\n * is nullable and its value is set to null in the $fields array, then mark it as \"set to null\" in the\n * $this->openAPINullablesSetToNull array\n *\n * @param string $variableName\n * @param array $fields\n * @param mixed $defaultValue\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoVoucherField.php#L239-L246", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoZend.openAPIFormats", "code": "public static function openAPIFormats()\n {\n return self::$openAPIFormats;\n }", "docstring": "/**\n * Array of property to format mappings. 
Used for (de)serialization\n *\n * @return array\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoZend.php#L90-L93", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DtoZend.setIfExists", "code": "private function setIfExists(string $variableName, array $fields, $defaultValue): void\n {\n if (self::isNullable($variableName) && array_key_exists($variableName, $fields) && is_null($fields[$variableName])) {\n $this->openAPINullablesSetToNull[] = $variableName;\n }\n\n $this->container[$variableName] = $fields[$variableName] ?? $defaultValue;\n }", "docstring": "/**\n * Sets $this->container[$variableName] to the given data or to the given default Value; if $variableName\n * is nullable and its value is set to null in the $fields array, then mark it as \"set to null\" in the\n * $this->openAPINullablesSetToNull array\n *\n * @param string $variableName\n * @param array $fields\n * @param mixed $defaultValue\n */", "url": "https://github.com/zenditplatform/zendit-php-sdk/blob/b8512157cdbbcec5f99a92154d21665fd971b12a/lib/Model/DtoZend.php#L260-L267", "sha": "b8512157cdbbcec5f99a92154d21665fd971b12a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GeneratedClassTest.testMessageWithoutNamespace", "code": "public function testMessageWithoutNamespace()\n {\n $m = new TestMessage();\n $n = new NoNameSpaceMessage();\n $m->setOptionalNoNamespaceMessage($n);\n $repeatedNoNamespaceMessage = $m->getRepeatedNoNamespaceMessage();\n $repeatedNoNamespaceMessage[] = new NoNameSpaceMessage();\n $m->setRepeatedNoNamespaceMessage($repeatedNoNamespaceMessage);\n\n // test nested messages\n $sub = new NoNamespaceMessage\\NestedMessage();\n $n->setNestedMessage($sub);\n\n $this->assertTrue(true);\n }", "docstring": "#########################################################\n# Test message/enum without namespace.\n#########################################################", "url": "https://github.com/srivatsankrishnan/oss-arch-gym/blob/fab6d1442541b5cdf40daf24e64e63261da2d846/sims/AstraSim/protobuf-3.12.4/php/tests/generated_class_test.php#L703-L717", "sha": "fab6d1442541b5cdf40daf24e64e63261da2d846"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WrapperTypeSettersTest.testConstructorWithMapWrapperType", "code": "public function testConstructorWithMapWrapperType($wrapperField, $getter, $value)\n {\n $actualInstance = new TestWrapperSetters([$wrapperField => $value]);\n foreach ($actualInstance->$getter() as $key => $actualWrapperValue) {\n $actualInnerValue = $actualWrapperValue->getValue();\n $expectedElement = $value[$key];\n if (is_object($expectedElement) && is_a($expectedElement, '\\Google\\Protobuf\\StringValue')) {\n $expectedInnerValue = $expectedElement->getValue();\n } elseif (is_object($expectedElement) && is_a($expectedElement, '\\Google\\Protobuf\\Internal\\MapEntry')) {\n $expectedInnerValue = $expectedElement->getValue()->getValue();\n } else {\n $expectedInnerValue = $expectedElement;\n }\n $this->assertEquals($expectedInnerValue, $actualInnerValue);\n }\n }", "docstring": "/**\n * @dataProvider constructorWithMapWrapperTypeDataProvider\n */", "url": "https://github.com/srivatsankrishnan/oss-arch-gym/blob/fab6d1442541b5cdf40daf24e64e63261da2d846/sims/AstraSim/protobuf-3.12.4/php/tests/wrapper_type_setters_test.php#L274-L289", "sha": 
"fab6d1442541b5cdf40daf24e64e63261da2d846"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CreateStocksTable.up", "code": "public function up()\n {\n Schema::create('stocks', function (Blueprint $table) {\n $table->increments('id');\n $table->string('product_code');\n $table->integer('product_id');\n $table->integer('category_id');\n $table->integer('vendor_id');\n $table->integer('user_id');\n $table->string('chalan_no');\n $table->double('buying_price');\n $table->double('selling_price');\n $table->double('discount')->default(0);\n $table->integer('stock_quantity');\n $table->integer('current_quantity')->default(0);\n $table->text('note')->nullable();\n $table->tinyInteger('status')->default(1);\n $table->timestamps();\n });\n }", "docstring": "/**\n * Run the migrations.\n *\n * @return void\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/database/migrations/2018_12_10_052521_create_stocks_table.php#L14-L33", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Xdg.getHomeConfigDir", "code": "public function getHomeConfigDir()\n {\n $path = getenv('XDG_CONFIG_HOME') ?: $this->getHomeDir() . DIRECTORY_SEPARATOR . '.config';\n\n return $path;\n }", "docstring": "/**\n * @return string\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/dnoegel/php-xdg-base-dir/src/Xdg.php#L31-L36", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SimpleSerializableAsset.__construct", "code": "public function __construct()\n {\n throw new BadMethodCallException('Not supposed to be called!');\n }", "docstring": "/**\n * Constructor - should not be called\n *\n * @throws BadMethodCallException\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/doctrine/instantiator/tests/DoctrineTest/InstantiatorTestAsset/SimpleSerializableAsset.php#L37-L40", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Address.buildingNumber", "code": "public static function buildingNumber()\n {\n return static::numerify(static::randomElement(static::$buildingNumber));\n }", "docstring": "/**\n * @example '791'\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/fzaninotto/faker/src/Faker/Provider/Address.php#L45-L48", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Address.country", "code": "public static function country()\n {\n return static::randomElement(static::$country);\n }", "docstring": "/**\n * @example 'Japan'\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/fzaninotto/faker/src/Faker/Provider/Address.php#L101-L104", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Payment.bankAccountNumber", "code": "public static function bankAccountNumber($prefix = '', $countryCode = 'DE', $length = null)\n {\n return static::iban($countryCode, $prefix, $length);\n }", "docstring": "/**\n * International Bank Account Number (IBAN)\n * @link 
http://en.wikipedia.org/wiki/International_Bank_Account_Number\n * @param string $prefix for generating bank account number of a specific bank\n * @param string $countryCode ISO 3166-1 alpha-2 country code\n * @param integer $length total length without country code and 2 check digits\n * @return string\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/fzaninotto/faker/src/Faker/Provider/de_DE/Payment.php#L15-L18", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Address.randomPostcodeLetter", "code": "public static function randomPostcodeLetter()\n {\n return static::randomElement(static::$postcodeLetters);\n }", "docstring": "/**\n * Returns a postalcode-safe letter\n * @example A1B 2C3\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/fzaninotto/faker/src/Faker/Provider/en_CA/Address.php#L47-L50", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Address.citySuffix", "code": "public static function citySuffix()\n {\n return static::randomElement(static::$citySuffix);\n }", "docstring": "/**\n * @example '-des-Sables'\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/fzaninotto/faker/src/Faker/Provider/fr_CA/Address.php#L97-L100", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Person.firstNameMaleChristian", "code": "public static function firstNameMaleChristian()\n {\n return static::randomElement(static::$firstNameMaleChristian);\n }", "docstring": "/**\n * Return a Christian male name\n * \n * @example 'Aaron'\n * \n * @return string\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/fzaninotto/faker/src/Faker/Provider/ms_MY/Person.php#L682-L685", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AuthManager.resolve", "code": "protected function resolve($name)\n {\n $config = $this->getConfig($name);\n\n if (is_null($config)) {\n throw new InvalidArgumentException(\"Auth guard [{$name}] is not defined.\");\n }\n\n if (isset($this->customCreators[$config['driver']])) {\n return $this->callCustomCreator($name, $config);\n }\n\n $driverMethod = 'create'.ucfirst($config['driver']).'Driver';\n\n if (method_exists($this, $driverMethod)) {\n return $this->{$driverMethod}($name, $config);\n }\n\n throw new InvalidArgumentException(\"Auth guard driver [{$name}] is not defined.\");\n }", "docstring": "/**\n * Resolve the given guard.\n *\n * @param string $name\n * @return \\Illuminate\\Contracts\\Auth\\Guard|\\Illuminate\\Contracts\\Auth\\StatefulGuard\n *\n * @throws \\InvalidArgumentException\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Auth/AuthManager.php#L79-L98", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EloquentUserProvider.retrieveByToken", "code": "public function retrieveByToken($identifier, $token)\n {\n $model = $this->createModel();\n\n $model = $model->where($model->getAuthIdentifierName(), 
$identifier)->first();\n\n if (! $model) {\n return null;\n }\n\n $rememberToken = $model->getRememberToken();\n\n return $rememberToken && hash_equals($rememberToken, $token) ? $model : null;\n }", "docstring": "/**\n * Retrieve a user by their unique identifier and \"remember me\" token.\n *\n * @param mixed $identifier\n * @param string $token\n * @return \\Illuminate\\Contracts\\Auth\\Authenticatable|null\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Auth/EloquentUserProvider.php#L61-L74", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SessionGuard.cycleRememberToken", "code": "protected function cycleRememberToken(AuthenticatableContract $user)\n {\n $user->setRememberToken($token = Str::random(60));\n\n $this->provider->updateRememberToken($user, $token);\n }", "docstring": "/**\n * Refresh the \"remember me\" token for the user.\n *\n * @param \\Illuminate\\Contracts\\Auth\\Authenticatable $user\n * @return void\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Auth/SessionGuard.php#L529-L534", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Gate.resolvePolicy", "code": "public function resolvePolicy($class)\n {\n return $this->container->make($class);\n }", "docstring": "/**\n * Build a policy class instance of the given type.\n *\n * @param object|string $class\n * @return mixed\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Auth/Access/Gate.php#L436-L439", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RedisTaggedCache.deleteForeverKeys", "code": "protected function deleteForeverKeys()\n {\n $this->deleteKeysByReference(self::REFERENCE_KEY_FOREVER);\n }", "docstring": "/**\n * Delete all of the items that were stored forever.\n *\n * @return void\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Cache/RedisTaggedCache.php#L108-L111", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Repository.getMinutes", "code": "protected function getMinutes($duration)\n {\n $duration = $this->parseDateInterval($duration);\n\n if ($duration instanceof DateTimeInterface) {\n $duration = Carbon::now()->diffInSeconds(Carbon::createFromTimestamp($duration->getTimestamp()), false) / 60;\n }\n\n return (int) ($duration * 60) > 0 ? 
$duration : null;\n }", "docstring": "/**\n * Calculate the number of minutes with the given duration.\n *\n * @param \\DateTimeInterface|\\DateInterval|float|int $duration\n * @return float|int|null\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Cache/Repository.php#L550-L559", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Command.run", "code": "public function run(InputInterface $input, OutputInterface $output)\n {\n return parent::run(\n $this->input = $input, $this->output = new OutputStyle($input, $output)\n );\n }", "docstring": "/**\n * Run the console command.\n *\n * @param \\Symfony\\Component\\Console\\Input\\InputInterface $input\n * @param \\Symfony\\Component\\Console\\Output\\OutputInterface $output\n * @return int\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Console/Command.php#L167-L172", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Connection.insert", "code": "public function insert($query, $bindings = [])\n {\n return $this->statement($query, $bindings);\n }", "docstring": "/**\n * Run an insert statement against the database.\n *\n * @param string $query\n * @param array $bindings\n * @return bool\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Database/Connection.php#L409-L412", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SqlServerConnection.getSchemaBuilder", "code": "public function getSchemaBuilder()\n {\n if (is_null($this->schemaGrammar)) {\n $this->useDefaultSchemaGrammar();\n }\n\n return new SqlServerBuilder($this);\n }", "docstring": "/**\n * Get a schema builder instance for the connection.\n *\n * @return \\Illuminate\\Database\\Schema\\SqlServerBuilder\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Database/SqlServerConnection.php#L75-L82", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StatusCommand.__construct", "code": "public function __construct(Migrator $migrator)\n {\n parent::__construct();\n\n $this->migrator = $migrator;\n }", "docstring": "/**\n * Create a new migration rollback command instance.\n *\n * @param \\Illuminate\\Database\\Migrations\\Migrator $migrator\n * @return void\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Database/Console/Migrations/StatusCommand.php#L38-L43", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HasManyThrough.findOrFail", "code": "public function findOrFail($id, $columns = ['*'])\n {\n $result = $this->find($id, $columns);\n\n if (is_array($id)) {\n if (count($result) == count(array_unique($id))) {\n return $result;\n }\n } elseif (! 
is_null($result)) {\n return $result;\n }\n\n throw (new ModelNotFoundException)->setModel(get_class($this->related));\n }", "docstring": "/**\n * Find a related model by its primary key or throw an exception.\n *\n * @param mixed $id\n * @param array $columns\n * @return \\Illuminate\\Database\\Eloquent\\Model|\\Illuminate\\Database\\Eloquent\\Collection\n *\n * @throws \\Illuminate\\Database\\Eloquent\\ModelNotFoundException\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Database/Eloquent/Relations/HasManyThrough.php#L324-L337", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MorphMany.getResults", "code": "public function getResults()\n {\n return $this->query->get();\n }", "docstring": "/**\n * Get the results of the relationship.\n *\n * @return mixed\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Database/Eloquent/Relations/MorphMany.php#L14-L17", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Builder.addDynamic", "code": "protected function addDynamic($segment, $connector, $parameters, $index)\n {\n // Once we have parsed out the columns and formatted the boolean operators we\n // are ready to add it to this query as a where clause just like any other\n // clause on the query. Then we'll increment the parameter index values.\n $bool = strtolower($connector);\n\n $this->where(Str::snake($segment), '=', $parameters[$index], $bool);\n }", "docstring": "/**\n * Add a single dynamic where clause statement to the query.\n *\n * @param string $segment\n * @param string $connector\n * @param array $parameters\n * @param int $index\n * @return void\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Database/Query/Builder.php#L1302-L1310", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Grammar.dateBasedWhere", "code": "protected function dateBasedWhere($type, Builder $query, $where)\n {\n $value = $this->parameter($where['value']);\n\n return $type.'('.$this->wrap($where['column']).') '.$where['operator'].' 
'.$value;\n }", "docstring": "/**\n * Compile a date based where clause.\n *\n * @param string $type\n * @param \\Illuminate\\Database\\Query\\Builder $query\n * @param array $where\n * @return string\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Database/Query/Grammars/Grammar.php#L402-L407", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Blueprint.primary", "code": "public function primary($columns, $name = null, $algorithm = null)\n {\n return $this->indexCommand('primary', $columns, $name, $algorithm);\n }", "docstring": "/**\n * Specify the primary key(s) for the table.\n *\n * @param string|array $columns\n * @param string $name\n * @param string|null $algorithm\n * @return \\Illuminate\\Support\\Fluent\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Database/Schema/Blueprint.php#L368-L371", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MySqlGrammar.typeBoolean", "code": "protected function typeBoolean(Fluent $column)\n {\n return 'tinyint(1)';\n }", "docstring": "/**\n * Create the column definition for a boolean type.\n *\n * @param \\Illuminate\\Support\\Fluent $column\n * @return string\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Database/Schema/Grammars/MySqlGrammar.php#L539-L542", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TestResponse.assertSee", "code": "public function assertSee($value)\n {\n PHPUnit::assertContains($value, $this->getContent());\n\n return $this;\n }", "docstring": "/**\n * Assert that the given string is contained within the response.\n *\n * @param string $value\n * @return $this\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Foundation/Testing/TestResponse.php#L251-L256", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Writer.notice", "code": "public function notice($message, array $context = [])\n {\n $this->writeLog(__FUNCTION__, $message, $context);\n }", "docstring": "/**\n * Log a notice to the logs.\n *\n * @param string $message\n * @param array $context\n * @return void\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Log/Writer.php#L136-L139", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Mailable.buildViewData", "code": "public function buildViewData()\n {\n $data = $this->viewData;\n\n foreach ((new ReflectionClass($this))->getProperties(ReflectionProperty::IS_PUBLIC) as $property) {\n if ($property->getDeclaringClass()->getName() != self::class) {\n $data[$property->getName()] = $property->getValue($this);\n }\n }\n\n return $data;\n }", "docstring": "/**\n * Build the view data for the message.\n *\n * @return array\n */", "url": 
"https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Mail/Mailable.php#L227-L238", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Markdown.__construct", "code": "public function __construct(ViewFactory $view, array $options = [])\n {\n $this->view = $view;\n $this->theme = $options['theme'] ?? 'default';\n $this->loadComponentsFrom($options['paths'] ?? []);\n }", "docstring": "/**\n * Create a new Markdown renderer instance.\n *\n * @param \\Illuminate\\Contracts\\View\\Factory $view\n * @param array $options\n * @return void\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Mail/Markdown.php#L40-L45", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Message.prepAttachment", "code": "protected function prepAttachment($attachment, $options = [])\n {\n // First we will check for a MIME type on the message, which instructs the\n // mail client on what type of attachment the file is so that it may be\n // downloaded correctly by the user. The MIME option is not required.\n if (isset($options['mime'])) {\n $attachment->setContentType($options['mime']);\n }\n\n // If an alternative name was given as an option, we will set that on this\n // attachment so that it will be downloaded with the desired names from\n // the developer, otherwise the default file names will get assigned.\n if (isset($options['as'])) {\n $attachment->setFilename($options['as']);\n }\n\n $this->swift->attach($attachment);\n\n return $this;\n }", "docstring": "/**\n * Prepare and attach the given attachment.\n *\n * @param \\Swift_Attachment $attachment\n * @param array $options\n * @return $this\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Mail/Message.php#L284-L303", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DatabaseJobRecord.__construct", "code": "public function __construct($record)\n {\n $this->record = $record;\n }", "docstring": "/**\n * Create a new job record instance.\n *\n * @param \\stdClass $record\n * @return void\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Queue/Jobs/DatabaseJobRecord.php#L24-L27", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MiddlewareNameResolver.resolve", "code": "public static function resolve($name, $map, $middlewareGroups)\n {\n // When the middleware is simply a Closure, we will return this Closure instance\n // directly so that Closures can be registered as middleware inline, which is\n // convenient on occasions when the developers are experimenting with them.\n if ($name instanceof Closure) {\n return $name;\n }\n\n if (isset($map[$name]) && $map[$name] instanceof Closure) {\n return $map[$name];\n }\n\n // If the middleware is the name of a middleware group, we will return the array\n // of middlewares that belong to the group. 
This allows developers to group a\n // set of middleware under single keys that can be conveniently referenced.\n if (isset($middlewareGroups[$name])) {\n return static::parseMiddlewareGroup($name, $map, $middlewareGroups);\n }\n\n // Finally, when the middleware is simply a string mapped to a class name the\n // middleware name will get parsed into the full class name and parameters\n // which may be run using the Pipeline which accepts this string format.\n list($name, $parameters) = array_pad(explode(':', $name, 2), 2, null);\n\n return ($map[$name] ?? $name).(! is_null($parameters) ? ':'.$parameters : '');\n }", "docstring": "/**\n * Resolve the middleware name to a class name(s) preserving passed parameters.\n *\n * @param string $name\n * @param array $map\n * @param array $middlewareGroups\n * @return \\Closure|string|array\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Routing/MiddlewareNameResolver.php#L17-L43", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RouteCompiler.getOptionalParameters", "code": "protected function getOptionalParameters()\n {\n preg_match_all('/\\{(\\w+?)\\?\\}/', $this->route->uri(), $matches);\n\n return isset($matches[1]) ? array_fill_keys($matches[1], null) : [];\n }", "docstring": "/**\n * Get the optional parameters for the route.\n *\n * @return array\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Routing/RouteCompiler.php#L48-L53", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CacheBasedSessionHandler.read", "code": "public function read($sessionId)\n {\n return $this->cache->get($sessionId, '');\n }", "docstring": "/**\n * {@inheritdoc}\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Session/CacheBasedSessionHandler.php#L56-L59", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Store.flashInput", "code": "public function flashInput(array $value)\n {\n $this->flash('_old_input', $value);\n }", "docstring": "/**\n * Flash an input array to the session.\n *\n * @param array $value\n * @return void\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Session/Store.php#L418-L421", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Collection.split", "code": "public function split($numberOfGroups)\n {\n if ($this->isEmpty()) {\n return new static;\n }\n\n $groupSize = ceil($this->count() / $numberOfGroups);\n\n return $this->chunk($groupSize);\n }", "docstring": "/**\n * Split a collection into a certain number of groups.\n *\n * @param int $numberOfGroups\n * @return static\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Support/Collection.php#L1333-L1342", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ViewErrorBag.getBag", "code": 
"public function getBag($key)\n {\n return Arr::get($this->bags, $key) ?: new MessageBag;\n }", "docstring": "/**\n * Get a MessageBag instance from the bags.\n *\n * @param string $key\n * @return \\Illuminate\\Contracts\\Support\\MessageBag\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Support/ViewErrorBag.php#L37-L40", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Crypt.getFacadeAccessor", "code": "protected static function getFacadeAccessor()\n {\n return 'encrypter';\n }", "docstring": "/**\n * Get the registered name of the component.\n *\n * @return string\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Support/Facades/Crypt.php#L15-L18", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Facade.isMock", "code": "protected static function isMock()\n {\n $name = static::getFacadeAccessor();\n\n return isset(static::$resolvedInstance[$name]) &&\n static::$resolvedInstance[$name] instanceof MockInterface;\n }", "docstring": "/**\n * Determines whether a mock is set as the instance of the facade.\n *\n * @return bool\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/laravel/framework/src/Illuminate/Support/Facades/Facade.php#L86-L92", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Ftp.rename", "code": "public function rename($path, $newpath)\n {\n return ftp_rename($this->getConnection(), $path, $newpath);\n }", "docstring": "/**\n * @inheritdoc\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/league/flysystem/src/Adapter/Ftp.php#L295-L298", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Local.writeStream", "code": "public function writeStream($path, $resource, Config $config)\n {\n $location = $this->applyPathPrefix($path);\n $this->ensureDirectory(dirname($location));\n $stream = fopen($location, 'w+b');\n\n if ( ! $stream || stream_copy_to_stream($resource, $stream) === false || ! 
fclose($stream)) {\n return false;\n }\n\n $type = 'file';\n $result = compact('type', 'path');\n\n if ($visibility = $config->get('visibility')) {\n $this->setVisibility($path, $visibility);\n $result['visibility'] = $visibility;\n }\n\n return $result;\n }", "docstring": "/**\n * @inheritdoc\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/league/flysystem/src/Adapter/Local.php#L153-L172", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Mockery.notAnyOf", "code": "public static function notAnyOf(...$args)\n {\n return new \\Mockery\\Matcher\\NotAnyOf($args);\n }", "docstring": "/**\n * Return instance of NOTANYOF matcher.\n *\n * @param array ...$args\n *\n * @return \\Mockery\\Matcher\\NotAnyOf\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/mockery/mockery/library/Mockery.php#L489-L492", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StringManipulationGenerator.withDefaultPasses", "code": "public static function withDefaultPasses()\n {\n return new static([\n new CallTypeHintPass(),\n new MagicMethodTypeHintsPass(),\n new ClassPass(),\n new TraitPass(),\n new ClassNamePass(),\n new InstanceMockPass(),\n new InterfacePass(),\n new MethodDefinitionPass(),\n new RemoveUnserializeForInternalSerializableClassesPass(),\n new RemoveBuiltinMethodsThatAreFinalPass(),\n new RemoveDestructorPass(),\n new ConstantsPass(),\n ]);\n }", "docstring": "/**\n * Creates a new StringManipulationGenerator with the default passes\n *\n * @return StringManipulationGenerator\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/mockery/mockery/library/Mockery/Generator/StringManipulationGenerator.php#L46-L62", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ContainerTest.testHandlesMethodWithArgumentExpectationWhenCalledWithNestedArray", "code": "public function testHandlesMethodWithArgumentExpectationWhenCalledWithNestedArray()\n {\n $testArray = array();\n $testArray['a_scalar'] = 2;\n $testArray['an_array'] = array(1, 2, 3);\n\n $mock = mock('MyTestClass');\n $mock->shouldReceive('foo')->with(array('yourself' => 21));\n\n $mock->foo($testArray);\n }", "docstring": "/**\n * @expectedException Mockery\\Exception\\NoMatchingExpectationException\n * @expectedExceptionMessage MyTestClass::foo(['a_scalar' => 2, 'an_array' => [...]])\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/mockery/mockery/tests/Mockery/ContainerTest.php#L1241-L1251", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExpectationTest.testGroupedOrderingThrowsExceptionWhenCallsDisordered", "code": "public function testGroupedOrderingThrowsExceptionWhenCallsDisordered()\n {\n $this->mock->shouldReceive('foo')->ordered('first');\n $this->mock->shouldReceive('bar')->ordered('second');\n $this->mock->bar();\n $this->mock->foo();\n Mockery::close();\n }", "docstring": "/**\n * @expectedException \\Mockery\\Exception\n */", "url": 
"https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/mockery/mockery/tests/Mockery/ExpectationTest.php#L895-L902", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Logger.alert", "code": "public function alert($message, array $context = array())\n {\n return $this->addRecord(static::ALERT, $message, $context);\n }", "docstring": "/**\n * Adds a log record at the ALERT level.\n *\n * This method allows for compatibility with common interfaces.\n *\n * @param string $message The log message\n * @param array $context The log context\n * @return bool Whether the record has been processed\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/monolog/monolog/src/Monolog/Logger.php#L747-L750", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HtmlFormatter.formatBatch", "code": "public function formatBatch(array $records)\n {\n $message = '';\n foreach ($records as $record) {\n $message .= $this->format($record);\n }\n\n return $message;\n }", "docstring": "/**\n * Formats a set of log records.\n *\n * @param array $records A set of records to format\n * @return mixed The formatted set of records\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/monolog/monolog/src/Monolog/Formatter/HtmlFormatter.php#L118-L126", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SocketHandler.write", "code": "protected function write(array $record)\n {\n $this->connectIfNotConnected();\n $data = $this->generateDataStream($record);\n $this->writeToSocket($data);\n }", "docstring": "/**\n * Connect (if necessary) and write to the socket\n *\n * @param array $record\n *\n * @throws \\UnexpectedValueException\n * @throws \\RuntimeException\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/monolog/monolog/src/Monolog/Handler/SocketHandler.php#L56-L61", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TestHandler.hasRecord", "code": "public function hasRecord($record, $level)\n {\n if (is_string($record)) {\n $record = array('message' => $record);\n }\n\n return $this->hasRecordThatPasses(function ($rec) use ($record) {\n if ($rec['message'] !== $record['message']) {\n return false;\n }\n if (isset($record['context']) && $rec['context'] !== $record['context']) {\n return false;\n }\n return true;\n }, $level);\n }", "docstring": "/**\n * @param string|array $record Either a message string or an array containing message and optionally context keys that will be checked against all records\n * @param int $level Logger::LEVEL constant value\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/monolog/monolog/src/Monolog/Handler/TestHandler.php#L91-L106", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RegistryTest.testReplacesLogger", "code": "public function testReplacesLogger()\n {\n $log1 = new Logger('test1');\n $log2 = new Logger('test2');\n\n Registry::addLogger($log1, 'log');\n\n 
Registry::addLogger($log2, 'log', true);\n\n $this->assertSame($log2, Registry::getInstance('log'));\n }", "docstring": "/**\n * @covers Monolog\\Registry::addLogger\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/monolog/monolog/tests/Monolog/RegistryTest.php#L128-L138", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ChromePHPFormatterTest.testBatchFormatThrowException", "code": "public function testBatchFormatThrowException()\n {\n $formatter = new ChromePHPFormatter();\n $records = array(\n array(\n 'level' => Logger::INFO,\n 'level_name' => 'INFO',\n 'channel' => 'meh',\n 'context' => array(),\n 'datetime' => new \\DateTime(\"@0\"),\n 'extra' => array(),\n 'message' => 'log',\n ),\n array(\n 'level' => Logger::WARNING,\n 'level_name' => 'WARNING',\n 'channel' => 'foo',\n 'context' => array(),\n 'datetime' => new \\DateTime(\"@0\"),\n 'extra' => array(),\n 'message' => 'log2',\n ),\n );\n\n $this->assertEquals(\n array(\n array(\n 'meh',\n 'log',\n 'unknown',\n 'info',\n ),\n array(\n 'foo',\n 'log2',\n 'unknown',\n 'warn',\n ),\n ),\n $formatter->formatBatch($records)\n );\n }", "docstring": "/**\n * @covers Monolog\\Formatter\\ChromePHPFormatter::formatBatch\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/monolog/monolog/tests/Monolog/Formatter/ChromePHPFormatterTest.php#L116-L157", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CronExpressionTest.testIsDueHandlesDifferentTimezones", "code": "public function testIsDueHandlesDifferentTimezones()\n {\n $cron = CronExpression::factory('0 15 * * 3'); //Wednesday at 15:00\n $date = '2014-01-01 15:00'; //Wednesday\n $utc = new DateTimeZone('UTC');\n $amsterdam = new DateTimeZone('Europe/Amsterdam');\n $tokyo = new DateTimeZone('Asia/Tokyo');\n\n date_default_timezone_set('UTC');\n $this->assertTrue($cron->isDue(new DateTime($date, $utc)));\n $this->assertFalse($cron->isDue(new DateTime($date, $amsterdam)));\n $this->assertFalse($cron->isDue(new DateTime($date, $tokyo)));\n\n date_default_timezone_set('Europe/Amsterdam');\n $this->assertFalse($cron->isDue(new DateTime($date, $utc)));\n $this->assertTrue($cron->isDue(new DateTime($date, $amsterdam)));\n $this->assertFalse($cron->isDue(new DateTime($date, $tokyo)));\n\n date_default_timezone_set('Asia/Tokyo');\n $this->assertFalse($cron->isDue(new DateTime($date, $utc)));\n $this->assertFalse($cron->isDue(new DateTime($date, $amsterdam)));\n $this->assertTrue($cron->isDue(new DateTime($date, $tokyo)));\n }", "docstring": "/**\n * @covers Cron\\CronExpression::isDue\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/mtdowling/cron-expression/tests/Cron/CronExpressionTest.php#L223-L245", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DeepCopy.getFirstMatchedTypeFilter", "code": "private function getFirstMatchedTypeFilter(array $filterRecords, $var)\n {\n $matched = $this->first(\n $filterRecords,\n function (array $record) use ($var) {\n /* @var TypeMatcher $matcher */\n $matcher = $record['matcher'];\n\n return $matcher->matches($var);\n }\n );\n\n return isset($matched) ? 
$matched['filter'] : null;\n }", "docstring": "/**\n * Returns first filter that matches variable, `null` if no such filter found.\n *\n * @param array $filterRecords Associative array with 2 members: 'filter' with value of type {@see TypeFilter} and\n * 'matcher' with value of type {@see TypeMatcher}\n * @param mixed $var\n *\n * @return TypeFilter|null\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/myclabs/deep-copy/src/DeepCopy/DeepCopy.php#L247-L260", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Carbon.createFromDate", "code": "public static function createFromDate($year = null, $month = null, $day = null, $tz = null)\n {\n return static::create($year, $month, $day, null, null, null, $tz);\n }", "docstring": "/**\n * Create a Carbon instance from just a date. The time portion is set to now.\n *\n * @param int|null $year\n * @param int|null $month\n * @param int|null $day\n * @param \\DateTimeZone|string|null $tz\n *\n * @throws \\InvalidArgumentException\n *\n * @return static\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/nesbot/carbon/src/Carbon/Carbon.php#L806-L809", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Carbon.subWeekdays", "code": "public function subWeekdays($value)\n {\n return $this->addWeekdays(-1 * $value);\n }", "docstring": "/**\n * Remove weekdays from the instance\n *\n * @param int $value\n *\n * @return static\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/nesbot/carbon/src/Carbon/Carbon.php#L3400-L3403", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CarbonInterval.copy", "code": "public function copy()\n {\n $date = new static($this->spec());\n $date->invert = $this->invert;\n\n return $date;\n }", "docstring": "/**\n * Get a copy of the instance.\n *\n * @return static\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/nesbot/carbon/src/Carbon/CarbonInterval.php#L316-L322", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CarbonPeriod.createFromIso", "code": "public static function createFromIso($iso, $options = null)\n {\n $params = static::parseIso8601($iso);\n\n $instance = static::createFromArray($params);\n\n if ($options !== null) {\n $instance->setOptions($options);\n }\n\n return $instance;\n }", "docstring": "/**\n * Create CarbonPeriod from ISO 8601 string.\n *\n * @param string $iso\n * @param int|null $options\n *\n * @return static\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/nesbot/carbon/src/Carbon/CarbonPeriod.php#L254-L265", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FullyQualified.isQualified", "code": "public function isQualified() : bool {\n return false;\n }", "docstring": "/**\n * Checks whether the name is qualified. (E.g. 
Name\\Name)\n *\n * @return bool Whether the name is qualified\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/nikic/php-parser/lib/PhpParser/Node/Name/FullyQualified.php#L21-L23", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Interface_.__construct", "code": "public function __construct($name, array $subNodes = [], array $attributes = []) {\n        parent::__construct($attributes);\n        $this->name = \\is_string($name) ? new Node\\Identifier($name) : $name;\n        $this->extends = $subNodes['extends'] ?? [];\n        $this->stmts = $subNodes['stmts'] ?? [];\n    }", "docstring": "/**\n     * Constructs a class node.\n     *\n     * @param string|Node\\Identifier $name Name\n     * @param array $subNodes Array of the following optional subnodes:\n     * 'extends' => array(): Name of extended interfaces\n     * 'stmts' => array(): Statements\n     * @param array $attributes Additional attributes\n     */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/nikic/php-parser/lib/PhpParser/Node/Stmt/Interface_.php#L21-L26", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TryCatch.__construct", "code": "public function __construct(array $stmts, array $catches, Finally_ $finally = null, array $attributes = []) {\n        parent::__construct($attributes);\n        $this->stmts = $stmts;\n        $this->catches = $catches;\n        $this->finally = $finally;\n    }", "docstring": "/**\n     * Constructs a try catch node.\n     *\n     * @param Node\\Stmt[] $stmts Statements\n     * @param Catch_[] $catches Catches\n     * @param null|Finally_ $finally Optionaly finally node\n     * @param array $attributes Additional attributes\n     */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/nikic/php-parser/lib/PhpParser/Node/Stmt/TryCatch.php#L24-L29", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ParserFactoryTest.testCreate", "code": "public function testCreate($kind, $lexer, $expected) {\n        $this->assertInstanceOf($expected, (new ParserFactory)->create($kind, $lexer));\n    }", "docstring": "/** @dataProvider provideTestCreate */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/nikic/php-parser/test/PhpParser/ParserFactoryTest.php#L12-L14", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EmulativeTest.testNoReplaceKeywordsAfterObjectOperator", "code": "public function testNoReplaceKeywordsAfterObjectOperator($keyword) {\n        $lexer = $this->getLexer();\n        $lexer->startLexing('<?php ->' . 
$keyword);\n\n $this->assertSame(Tokens::T_OBJECT_OPERATOR, $lexer->getNextToken());\n $this->assertSame(Tokens::T_STRING, $lexer->getNextToken());\n $this->assertSame(0, $lexer->getNextToken());\n }", "docstring": "/**\n * @dataProvider provideTestReplaceKeywords\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/nikic/php-parser/test/PhpParser/Lexer/EmulativeTest.php#L31-L38", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BundledComponentCollectionIterator.valid", "code": "public function valid() {\n return $this->position < count($this->bundledComponents);\n }", "docstring": "/**\n * @return bool\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phar-io/manifest/src/values/BundledComponentCollectionIterator.php#L35-L37", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ManifestDocument.__construct", "code": "private function __construct(DOMDocument $dom) {\n $this->ensureCorrectDocumentType($dom);\n\n $this->dom = $dom;\n }", "docstring": "/**\n * ManifestDocument constructor.\n *\n * @param DOMDocument $dom\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phar-io/manifest/src/xml/ManifestDocument.php#L29-L33", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DocBlockFactory.filterTagBlock", "code": "private function filterTagBlock($tags)\n {\n $tags = trim($tags);\n if (!$tags) {\n return null;\n }\n\n if ('@' !== $tags[0]) {\n // @codeCoverageIgnoreStart\n // Can't simulate this; this only happens if there is an error with the parsing of the DocBlock that\n // we didn't foresee.\n throw new \\LogicException('A tag block started with text instead of an at-sign(@): ' . 
$tags);\n // @codeCoverageIgnoreEnd\n }\n\n return $tags;\n }", "docstring": "/**\n * @param $tags\n * @return string\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpdocumentor/reflection-docblock/src/DocBlockFactory.php#L260-L276", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Mixed_.__toString", "code": "public function __toString()\n {\n return 'mixed';\n }", "docstring": "/**\n * Returns a rendered output of the Type as it would be used in a DocBlock.\n *\n * @return string\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpdocumentor/type-resolver/src/Types/Mixed_.php#L27-L30", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ReflectionClassNewInstancePatch.supports", "code": "public function supports(ClassNode $node)\n {\n return 'ReflectionClass' === $node->getParentClass();\n }", "docstring": "/**\n * Supports ReflectionClass\n *\n * @param ClassNode $node\n *\n * @return bool\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpspec/prophecy/src/Prophecy/Doubler/ClassPatch/ReflectionClassNewInstancePatch.php#L31-L34", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ReflectionClassNewInstancePatch.apply", "code": "public function apply(ClassNode $node)\n {\n foreach ($node->getMethod('newInstance')->getArguments() as $argument) {\n $argument->setDefault(null);\n }\n }", "docstring": "/**\n * Updates newInstance's first argument to make it optional\n *\n * @param ClassNode $node\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpspec/prophecy/src/Prophecy/Doubler/ClassPatch/ReflectionClassNewInstancePatch.php#L41-L46", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AbstractNode.getLinesOfCode", "code": "abstract public function getLinesOfCode();", "docstring": "/**\n * Returns the LOC/CLOC/NCLOC of this node.\n *\n * @return array\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpunit/php-code-coverage/src/Node/AbstractNode.php#L333-L333", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Directory.renderItem", "code": "protected function renderItem(Node $node, $total = false)\n {\n $data = [\n 'numClasses' => $node->getNumClassesAndTraits(),\n 'numTestedClasses' => $node->getNumTestedClassesAndTraits(),\n 'numMethods' => $node->getNumFunctionsAndMethods(),\n 'numTestedMethods' => $node->getNumTestedFunctionsAndMethods(),\n 'linesExecutedPercent' => $node->getLineExecutedPercent(false),\n 'linesExecutedPercentAsString' => $node->getLineExecutedPercent(),\n 'numExecutedLines' => $node->getNumExecutedLines(),\n 'numExecutableLines' => $node->getNumExecutableLines(),\n 'testedMethodsPercent' => $node->getTestedFunctionsAndMethodsPercent(false),\n 'testedMethodsPercentAsString' => $node->getTestedFunctionsAndMethodsPercent(),\n 'testedClassesPercent' => $node->getTestedClassesAndTraitsPercent(false),\n 
'testedClassesPercentAsString' => $node->getTestedClassesAndTraitsPercent()\n        ];\n\n        if ($total) {\n            $data['name'] = 'Total';\n        } else {\n            if ($node instanceof DirectoryNode) {\n                $data['name'] = \\sprintf(\n                    '<a href=\"%s/index.html\">%s</a>',\n                    $node->getName(),\n                    $node->getName()\n                );\n\n                $data['icon'] = ' ';\n            } else {\n                $data['name'] = \\sprintf(\n                    '<a href=\"%s.html\">%s</a>',\n                    $node->getName(),\n                    $node->getName()\n                );\n\n                $data['icon'] = ' ';\n            }\n        }\n\n        return $this->renderItemTemplate(\n            new \\Text_Template($this->templatePath . 'directory_item.html', '{{', '}}'),\n            $data\n        );\n    }", "docstring": "/**\n     * @param Node $node\n     * @param bool $total\n     *\n     * @return string\n     */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpunit/php-code-coverage/src/Report/Html/Renderer/Directory.php#L57-L100", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NotExistingCoveredElementTest.testThree", "code": "public function testThree()\n    {\n    }", "docstring": "/**\n     * @covers NotExistingClass::\n     */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpunit/php-code-coverage/tests/_files/NotExistingCoveredElementTest.php#L23-L25", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TestSuite.setBeStrictAboutChangesToGlobalState", "code": "public function setBeStrictAboutChangesToGlobalState($beStrictAboutChangesToGlobalState)\n    {\n        if (null === $this->beStrictAboutChangesToGlobalState && \\is_bool($beStrictAboutChangesToGlobalState)) {\n            $this->beStrictAboutChangesToGlobalState = $beStrictAboutChangesToGlobalState;\n        }\n    }", "docstring": "/**\n     * @param bool $beStrictAboutChangesToGlobalState\n     */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpunit/phpunit/src/Framework/TestSuite.php#L952-L957", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "assertXmlFileEqualsXmlFile", "code": "function assertXmlFileEqualsXmlFile($expectedFile, $actualFile, $message = '')\n{\n    return Assert::assertXmlFileEqualsXmlFile(...\\func_get_args());\n}", "docstring": "/**\n * Asserts that two XML files are equal.\n *\n * @param string $expectedFile\n * @param string $actualFile\n * @param string $message\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpunit/phpunit/src/Framework/Assert/Functions.php#L1201-L1204", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Composite.__construct", "code": "public function __construct(Constraint $innerConstraint)\n    {\n        parent::__construct();\n        $this->innerConstraint = $innerConstraint;\n    }", "docstring": "/**\n     * @param Constraint $innerConstraint\n     */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpunit/phpunit/src/Framework/Constraint/Composite.php#L24-L28", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ResultPrinter.printIncompletes", "code": "protected function printIncompletes(TestResult $result)\n    {\n        $this->printDefects($result->notImplemented(), 'incomplete test');\n    }", "docstring": 
"/**\n * @param TestResult $result\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpunit/phpunit/src/TextUI/ResultPrinter.php#L314-L317", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RequirementsTest.testExtensionVersionOperatorLessThanEquals", "code": "public function testExtensionVersionOperatorLessThanEquals()\n {\n }", "docstring": "/**\n * @requires extension testExtOne <= 1.0\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpunit/phpunit/tests/_files/RequirementsTest.php#L311-L313", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ClonedDependencyTest.testThree", "code": "public function testThree($dependency)\n {\n $this->assertSame(self::$dependency, $dependency);\n }", "docstring": "/**\n * @depends !clone testOne\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpunit/phpunit/tests/_files/ClonedDependencyTest.php#L31-L34", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AssertTest.testAssertEqualsSucceeds", "code": "public function testAssertEqualsSucceeds($a, $b, $delta = 0.0, $canonicalize = false, $ignoreCase = false)\n {\n $this->assertEquals($a, $b, '', $delta, 10, $canonicalize, $ignoreCase);\n }", "docstring": "/**\n * @dataProvider equalProvider\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpunit/phpunit/tests/unit/Framework/AssertTest.php#L805-L808", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AssertTest.validInvalidJsonDataprovider", "code": "public static function validInvalidJsonDataprovider()\n {\n return [\n 'error syntax in expected JSON' => ['{\"Mascott\"::}', '{\"Mascott\" : \"Tux\"}'],\n 'error UTF-8 in actual JSON' => ['{\"Mascott\" : \"Tux\"}', '{\"Mascott\" : :}'],\n ];\n }", "docstring": "/**\n * @return array\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpunit/phpunit/tests/unit/Framework/AssertTest.php#L3172-L3178", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ConfigurationTest.assertConfigurationEquals", "code": "protected function assertConfigurationEquals(Configuration $expectedConfiguration, Configuration $actualConfiguration)\n {\n $this->assertEquals(\n $expectedConfiguration->getFilterConfiguration(),\n $actualConfiguration->getFilterConfiguration()\n );\n\n $this->assertEquals(\n $expectedConfiguration->getGroupConfiguration(),\n $actualConfiguration->getGroupConfiguration()\n );\n\n $this->assertEquals(\n $expectedConfiguration->getListenerConfiguration(),\n $actualConfiguration->getListenerConfiguration()\n );\n\n $this->assertEquals(\n $expectedConfiguration->getLoggingConfiguration(),\n $actualConfiguration->getLoggingConfiguration()\n );\n\n $this->assertEquals(\n $expectedConfiguration->getPHPConfiguration(),\n $actualConfiguration->getPHPConfiguration()\n );\n\n $this->assertEquals(\n $expectedConfiguration->getPHPUnitConfiguration(),\n 
$actualConfiguration->getPHPUnitConfiguration()\n );\n\n $this->assertEquals(\n $expectedConfiguration->getTestSuiteConfiguration()->tests(),\n $actualConfiguration->getTestSuiteConfiguration()->tests()\n );\n }", "docstring": "/**\n * Asserts that the values in $actualConfiguration equal $expectedConfiguration.\n *\n * @param Configuration $expectedConfiguration\n * @param Configuration $actualConfiguration\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/phpunit/phpunit/tests/unit/Util/ConfigurationTest.php#L421-L457", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NullLogger.log", "code": "public function log($level, $message, array $context = array())\n {\n // noop\n }", "docstring": "/**\n * Logs with an arbitrary level.\n *\n * @param mixed $level\n * @param string $message\n * @param array $context\n *\n * @return void\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/psr/log/Psr/Log/NullLogger.php#L24-L27", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Configuration.getReadline", "code": "public function getReadline()\n {\n if (!isset($this->readline)) {\n $className = $this->getReadlineClass();\n $this->readline = new $className(\n $this->getHistoryFile(),\n $this->getHistorySize(),\n $this->getEraseDuplicates()\n );\n }\n\n return $this->readline;\n }", "docstring": "/**\n * Get the Psy Shell readline service.\n *\n * By default, this service uses (in order of preference):\n *\n * * GNU Readline\n * * Libedit\n * * A transient array-based readline emulation.\n *\n * @return Readline\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/psy/psysh/src/Configuration.php#L537-L549", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CalledClassPass.enterNode", "code": "public function enterNode(Node $node)\n {\n if ($node instanceof Class_ || $node instanceof Trait_) {\n $this->inClass = true;\n } elseif ($node instanceof FuncCall && !$this->inClass) {\n // We'll give any args at all (besides null) a pass.\n // Technically we should be checking whether the args are objects, but this will do for now.\n //\n // @todo switch this to actually validate args when we get context-aware code cleaner passes.\n if (!empty($node->args) && !$this->isNull($node->args[0])) {\n return;\n }\n\n // We'll ignore name expressions as well (things like `$foo()`)\n if (!($node->name instanceof Name)) {\n return;\n }\n\n $name = \\strtolower($node->name);\n if (\\in_array($name, ['get_class', 'get_called_class'])) {\n $msg = \\sprintf('%s() called without object from outside a class', $name);\n throw new ErrorException($msg, 0, E_USER_WARNING, null, $node->getLine());\n }\n }\n }", "docstring": "/**\n * @throws ErrorException if get_class or get_called_class is called without an object from outside a class\n *\n * @param Node $node\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/psy/psysh/src/CodeCleaner/CalledClassPass.php#L43-L67", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"ValidClassNamePass.ensureClassOrInterfaceExists", "code": "protected function ensureClassOrInterfaceExists($name, $stmt)\n {\n if (!$this->classExists($name) && !$this->interfaceExists($name)) {\n throw $this->createError(\\sprintf('Class \\'%s\\' not found', $name), $stmt);\n }\n }", "docstring": "/**\n * Ensure that a referenced class _or interface_ exists.\n *\n * @throws FatalErrorException\n *\n * @param string $name\n * @param Stmt $stmt\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/psy/psysh/src/CodeCleaner/ValidClassNamePass.php#L244-L249", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Str.unvis", "code": "public static function unvis($input)\n {\n $output = \\preg_replace_callback(self::UNVIS_RX, 'self::unvisReplace', $input);\n // other escapes & octal are handled by stripcslashes\n return \\stripcslashes($output);\n }", "docstring": "/**\n * Decodes a string encoded by libsd's strvis.\n *\n * From `man 3 vis`:\n *\n * Use an ‘M’ to represent meta characters (characters with the 8th bit set),\n * and use a caret ‘^’ to represent control characters (see iscntrl(3)).\n * The following formats are used:\n *\n * \\040 Represents ASCII space.\n *\n * \\240 Represents Meta-space (  in HTML).\n *\n * \\M-C Represents character ‘C’ with the 8th bit set.\n * Spans characters ‘\\241’ through ‘\\376’.\n *\n * \\M^C Represents control character ‘C’ with the 8th bit set.\n * Spans characters ‘\\200’ through ‘\\237’, and ‘\\377’ (as ‘\\M^?’).\n *\n * \\^C Represents the control character ‘C’.\n * Spans characters ‘\\000’ through ‘\\037’, and ‘\\177’ (as ‘\\^?’).\n *\n * The other formats are supported by PHP's stripcslashes,\n * except for the \\s sequence (ASCII space).\n *\n * @param string $input The string to decode\n *\n * @return string\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/psy/psysh/src/Util/Str.php#L62-L67", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RandomNodeProvider.getNode", "code": "public function getNode()\n {\n $node = hexdec(bin2hex(random_bytes(6)));\n\n // Set the multicast bit; see RFC 4122, section 4.5.\n $node = $node | 0x010000000000;\n\n return str_pad(dechex($node), 12, '0', STR_PAD_LEFT);\n }", "docstring": "/**\n * Returns the system node ID\n *\n * @return string System node ID as a hexadecimal string\n * @throws \\Exception if it was not possible to gather sufficient entropy\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/ramsey/uuid/src/Provider/Node/RandomNodeProvider.php#L33-L41", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NumericComparatorTest.testAcceptsFails", "code": "public function testAcceptsFails($expected, $actual)\n {\n $this->assertFalse(\n $this->comparator->accepts($expected, $actual)\n );\n }", "docstring": "/**\n * @covers ::accepts\n * @dataProvider acceptsFailsProvider\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/sebastian/comparator/tests/NumericComparatorTest.php#L91-L96", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", 
"func_name": "Swift_CharacterStream_NgCharacterStream.setCharacterReaderFactory", "code": "public function setCharacterReaderFactory(Swift_CharacterReaderFactory $factory)\n {\n $this->charReaderFactory = $factory;\n }", "docstring": "/**\n * Set the CharacterReaderFactory for multi charset support.\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/swiftmailer/swiftmailer/lib/classes/Swift/CharacterStream/NgCharacterStream.php#L109-L112", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Swift_KeyCache_DiskKeyCache.clearAll", "code": "public function clearAll($nsKey)\n {\n if (array_key_exists($nsKey, $this->keys)) {\n foreach ($this->keys[$nsKey] as $itemKey => $null) {\n $this->clearKey($nsKey, $itemKey);\n }\n if (is_dir($this->path.'/'.$nsKey)) {\n rmdir($this->path.'/'.$nsKey);\n }\n unset($this->keys[$nsKey]);\n }\n }", "docstring": "/**\n * Clear all data in the namespace $nsKey if it exists.\n *\n * @param string $nsKey\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/swiftmailer/swiftmailer/lib/classes/Swift/KeyCache/DiskKeyCache.php#L225-L236", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Swift_Mime_SimpleHeaderSet.addParameterizedHeader", "code": "public function addParameterizedHeader($name, $value = null, $params = [])\n {\n $this->storeHeader($name, $this->factory->createParameterizedHeader($name, $value, $params));\n }", "docstring": "/**\n * Add a new ParameterizedHeader with $name, $value and $params.\n *\n * @param string $name\n * @param string $value\n * @param array $params\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/swiftmailer/swiftmailer/lib/classes/Swift/Mime/SimpleHeaderSet.php#L102-L105", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Swift_Plugins_LoggerPlugin.beforeTransportStopped", "code": "public function beforeTransportStopped(Swift_Events_TransportChangeEvent $evt)\n {\n $transportName = get_class($evt->getSource());\n $this->logger->add(sprintf('++ Stopping %s', $transportName));\n }", "docstring": "/**\n * Invoked just before a Transport is stopped.\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/swiftmailer/swiftmailer/lib/classes/Swift/Plugins/LoggerPlugin.php#L96-L100", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Swift_Signers_SMimeSigner.__construct", "code": "public function __construct($signCertificate = null, $signPrivateKey = null, $encryptCertificate = null)\n {\n if (null !== $signPrivateKey) {\n $this->setSignCertificate($signCertificate, $signPrivateKey);\n }\n\n if (null !== $encryptCertificate) {\n $this->setEncryptCertificate($encryptCertificate);\n }\n\n $this->replacementFactory = Swift_DependencyContainer::getInstance()\n ->lookup('transport.replacementfactory');\n\n $this->signOptions = PKCS7_DETACHED;\n $this->encryptCipher = OPENSSL_CIPHER_AES_128_CBC;\n }", "docstring": "/**\n * Constructor.\n *\n * @param string|null $signCertificate\n * @param string|null $signPrivateKey\n * @param string|null $encryptCertificate\n 
*/", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/swiftmailer/swiftmailer/lib/classes/Swift/Signers/SMimeSigner.php#L49-L64", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Swift_Transport_EsmtpTransport.doMailFromCommand", "code": "protected function doMailFromCommand($address)\n {\n $address = $this->addressEncoder->encodeString($address);\n $handlers = $this->getActiveHandlers();\n $params = [];\n foreach ($handlers as $handler) {\n $params = array_merge($params, (array) $handler->getMailParams());\n }\n $paramStr = !empty($params) ? ' '.implode(' ', $params) : '';\n $this->executeCommand(\n sprintf(\"MAIL FROM:<%s>%s\\r\\n\", $address, $paramStr), [250], $failures, true\n );\n }", "docstring": "/** Overridden to add Extension support */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/swiftmailer/swiftmailer/lib/classes/Swift/Transport/EsmtpTransport.php#L376-L388", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Swift_Transport_LoadBalancedTransport.getLastUsedTransport", "code": "public function getLastUsedTransport()\n {\n return $this->lastUsedTransport;\n }", "docstring": "/**\n * Get the Transport used in the last successful send operation.\n *\n * @return Swift_Transport\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/swiftmailer/swiftmailer/lib/classes/Swift/Transport/LoadBalancedTransport.php#L70-L73", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Swift_Transport_Esmtp_EightBitMimeHandler.getHandledKeyword", "code": "public function getHandledKeyword()\n {\n return '8BITMIME';\n }", "docstring": "/**\n * Get the name of the ESMTP extension this handles.\n *\n * @return string\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/swiftmailer/swiftmailer/lib/classes/Swift/Transport/Esmtp/EightBitMimeHandler.php#L41-L44", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Output.getFormatter", "code": "public function getFormatter()\n {\n return $this->formatter;\n }", "docstring": "/**\n * {@inheritdoc}\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/console/Output/Output.php#L58-L61", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OutputStyle.isDecorated", "code": "public function isDecorated()\n {\n return $this->output->isDecorated();\n }", "docstring": "/**\n * {@inheritdoc}\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/console/Style/OutputStyle.php#L94-L97", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SymfonyStyle.askQuestion", "code": "public function askQuestion(Question $question)\n {\n if ($this->input->isInteractive()) {\n $this->autoPrependBlock();\n }\n\n if (!$this->questionHelper) {\n $this->questionHelper = new SymfonyQuestionHelper();\n }\n\n 
$answer = $this->questionHelper->ask($this->input, $this, $question);\n\n        if ($this->input->isInteractive()) {\n            $this->newLine();\n            $this->bufferedOutput->write(\"\\n\");\n        }\n\n        return $answer;\n    }", "docstring": "/**\n     * @return mixed\n     */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/console/Style/SymfonyStyle.php#L284-L302", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CombinedSelectorNode.getSpecificity", "code": "public function getSpecificity()\n    {\n        return $this->selector->getSpecificity()->plus($this->subSelector->getSpecificity());\n    }", "docstring": "/**\n     * {@inheritdoc}\n     */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/css-selector/Node/CombinedSelectorNode.php#L69-L72", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HashNode.getSelector", "code": "public function getSelector()\n    {\n        return $this->selector;\n    }", "docstring": "/**\n     * @return NodeInterface\n     */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/css-selector/Node/HashNode.php#L42-L45", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CssSelectorConverterTest.testParseExceptions", "code": "public function testParseExceptions()\n    {\n        $converter = new CssSelectorConverter();\n        $converter->toXPath('h1:');\n    }", "docstring": "/**\n     * @expectedException \\Symfony\\Component\\CssSelector\\Exception\\ParseException\n     * @expectedExceptionMessage Expected identifier, but <eof> found.\n     */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/css-selector/Tests/CssSelectorConverterTest.php#L42-L46", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Translator.getExtension", "code": "public function getExtension($name)\n    {\n        if (!isset($this->extensions[$name])) {\n            throw new ExpressionErrorException(sprintf('Extension \"%s\" not registered.', $name));\n        }\n\n        return $this->extensions[$name];\n    }", "docstring": "/**\n     * @param string $name\n     *\n     * @return Extension\\ExtensionInterface\n     *\n     * @throws ExpressionErrorException\n     */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/css-selector/XPath/Translator.php#L147-L154", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ContainerAwareEventDispatcher.addSubscriberService", "code": "public function addSubscriberService($serviceId, $class)\n    {\n        @trigger_error(sprintf('The %s class is deprecated since Symfony 3.3 and will be removed in 4.0. Use EventDispatcher with closure factories instead.', __CLASS__), E_USER_DEPRECATED);\n\n        foreach ($class::getSubscribedEvents() as $eventName => $params) {\n            if (\\is_string($params)) {\n                $this->listenerIds[$eventName][] = array($serviceId, $params, 0);\n            } elseif (\\is_string($params[0])) {\n                $this->listenerIds[$eventName][] = array($serviceId, $params[0], isset($params[1]) ? 
$params[1] : 0);\n } else {\n foreach ($params as $listener) {\n $this->listenerIds[$eventName][] = array($serviceId, $listener[0], isset($listener[1]) ? $listener[1] : 0);\n }\n }\n }\n }", "docstring": "/**\n * Adds a service as event subscriber.\n *\n * @param string $serviceId The service ID of the subscriber service\n * @param string $class The service's class name (which must implement EventSubscriberInterface)\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/event-dispatcher/ContainerAwareEventDispatcher.php#L147-L162", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HeaderBag.__toString", "code": "public function __toString()\n {\n if (!$headers = $this->all()) {\n return '';\n }\n\n ksort($headers);\n $max = max(array_map('strlen', array_keys($headers))) + 1;\n $content = '';\n foreach ($headers as $name => $values) {\n $name = implode('-', array_map('ucfirst', explode('-', $name)));\n foreach ($values as $value) {\n $content .= sprintf(\"%-{$max}s %s\\r\\n\", $name.':', $value);\n }\n }\n\n return $content;\n }", "docstring": "/**\n * Returns the headers as a string.\n *\n * @return string The headers\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/http-foundation/HeaderBag.php#L39-L56", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HeaderBag.getIterator", "code": "public function getIterator()\n {\n return new \\ArrayIterator($this->headers);\n }", "docstring": "/**\n * Returns an iterator for headers.\n *\n * @return \\ArrayIterator An \\ArrayIterator instance\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/http-foundation/HeaderBag.php#L280-L283", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Session.set", "code": "public function set($name, $value)\n {\n $this->getAttributeBag()->set($name, $value);\n }", "docstring": "/**\n * {@inheritdoc}\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/http-foundation/Session/Session.php#L79-L82", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FlashBag.all", "code": "public function all()\n {\n $return = $this->peekAll();\n $this->flashes = array();\n\n return $return;\n }", "docstring": "/**\n * {@inheritdoc}\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/http-foundation/Session/Flash/FlashBag.php#L97-L103", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SessionHandlerProxy.updateTimestamp", "code": "public function updateTimestamp($sessionId, $data)\n {\n return $this->handler instanceof \\SessionUpdateTimestampHandlerInterface ? 
$this->handler->updateTimestamp($sessionId, $data) : $this->write($sessionId, $data);\n }", "docstring": "/**\n * {@inheritdoc}\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/http-foundation/Session/Storage/Proxy/SessionHandlerProxy.php#L97-L100", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Kernel.getCharset", "code": "public function getCharset()\n {\n return 'UTF-8';\n }", "docstring": "/**\n * {@inheritdoc}\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/http-kernel/Kernel.php#L450-L453", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ResettableServicePass.process", "code": "public function process(ContainerBuilder $container)\n {\n if (!$container->has('services_resetter')) {\n return;\n }\n\n $services = $methods = array();\n\n foreach ($container->findTaggedServiceIds($this->tagName, true) as $id => $tags) {\n $services[$id] = new Reference($id, ContainerInterface::IGNORE_ON_UNINITIALIZED_REFERENCE);\n $attributes = $tags[0];\n\n if (!isset($attributes['method'])) {\n throw new RuntimeException(sprintf('Tag %s requires the \"method\" attribute to be set.', $this->tagName));\n }\n\n $methods[$id] = $attributes['method'];\n }\n\n if (empty($services)) {\n $container->removeAlias('services_resetter');\n $container->removeDefinition('services_resetter');\n\n return;\n }\n\n $container->findDefinition('services_resetter')\n ->setArgument(0, new IteratorArgument($services))\n ->setArgument(1, $methods);\n }", "docstring": "/**\n * {@inheritdoc}\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/http-kernel/DependencyInjection/ResettableServicePass.php#L36-L65", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "KernelTest.testLocateResourceReturnsAllMatchesBis", "code": "public function testLocateResourceReturnsAllMatchesBis()\n {\n $kernel = $this->getKernel(array('getBundle'));\n $kernel\n ->expects($this->once())\n ->method('getBundle')\n ->will($this->returnValue(array(\n $this->getBundle(__DIR__.'/Fixtures/Bundle1Bundle'),\n $this->getBundle(__DIR__.'/Foobar'),\n )))\n ;\n\n $this->assertEquals(\n array(__DIR__.'/Fixtures/Bundle1Bundle/foo.txt'),\n $kernel->locateResource('@Bundle1Bundle/foo.txt', null, false)\n );\n }", "docstring": "/**\n * @group legacy\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/http-kernel/Tests/KernelTest.php#L485-L501", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ProcessTest.testNegativeTimeoutFromSetter", "code": "public function testNegativeTimeoutFromSetter()\n {\n $p = $this->getProcess('');\n $p->setTimeout(-1);\n }", "docstring": "/**\n * @expectedException \\Symfony\\Component\\Process\\Exception\\InvalidArgumentException\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/process/Tests/ProcessTest.php#L93-L97", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", 
"func_name": "CompiledRoute.getRegex", "code": "public function getRegex()\n {\n return $this->regex;\n }", "docstring": "/**\n * Returns the regex.\n *\n * @return string The regex\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/routing/CompiledRoute.php#L105-L108", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Route.getCondition", "code": "public function getCondition()\n {\n return $this->condition;\n }", "docstring": "/**\n * Returns the condition.\n *\n * @return string The condition\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/routing/Route.php#L495-L498", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HtmlDumper.setStyles", "code": "public function setStyles(array $styles)\n {\n $this->headerIsDumped = false;\n $this->styles = $styles + $this->styles;\n }", "docstring": "/**\n * {@inheritdoc}\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/var-dumper/Dumper/HtmlDumper.php#L69-L73", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DateCasterTest.testCastDateTime", "code": "public function testCastDateTime($time, $timezone, $xDate, $xTimestamp, $xInfos)\n {\n if ((\\defined('HHVM_VERSION_ID') || \\PHP_VERSION_ID <= 50509) && preg_match('/[-+]\\d{2}:\\d{2}/', $timezone)) {\n $this->markTestSkipped('DateTimeZone GMT offsets are supported since 5.5.10. See https://github.com/facebook/hhvm/issues/5875 for HHVM.');\n }\n\n $stub = new Stub();\n $date = new \\DateTime($time, new \\DateTimeZone($timezone));\n $cast = DateCaster::castDateTime($date, array('foo' => 'bar'), $stub, false, 0);\n\n $xDump = << $xDate\n]\nEODUMP;\n\n $this->assertDumpEquals($xDump, $cast);\n\n $xDump = <<assertDumpMatchesFormat($xDump, $cast[\"\\0~\\0date\"]);\n }", "docstring": "/**\n * @dataProvider provideDateTimes\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/symfony/var-dumper/Tests/Caster/DateCasterTest.php#L50-L82", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Token.getLine", "code": "public function getLine(): int {\n return $this->line;\n }", "docstring": "/**\n * @return int\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/vendor/theseer/tokenizer/src/Token.php#L37-L39", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BenchmarkVector", "code": "void BenchmarkVector()\n{\n\tEASTLTest_Printf(\"Vector\\n\");\n\n\tEA::UnitTest::RandGenT rng(EA::UnitTest::GetRandSeed());\n\tEA::StdC::Stopwatch stopwatch1(EA::StdC::Stopwatch::kUnitsCPUCycles);\n\tEA::StdC::Stopwatch stopwatch2(EA::StdC::Stopwatch::kUnitsCPUCycles);\n\n\t{\n\t\teastl::vector intVector(100000);\n\t\teastl::generate(intVector.begin(), intVector.end(), rng);\n\n\t\tfor(int i = 0; i < 2; i++)\n\t\t{\n\t\t\tStdVectorUint64 stdVectorUint64;\n\t\t\tEaVectorUint64 eaVectorUint64;\n\n\n\t\t\t///////////////////////////////\n\t\t\t// Test 
push_back\n\t\t\t///////////////////////////////\n\n\t\t\tTestPushBack(stopwatch1, stdVectorUint64, intVector);\n\t\t\tTestPushBack(stopwatch2, eaVectorUint64, intVector);\n\n\t\t\tif(i == 1)\n\t\t\t\tBenchmark::AddResult(\"vector/push_back\", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());\n\n\n\t\t\t///////////////////////////////\n\t\t\t// Test operator[].\n\t\t\t///////////////////////////////\n\n\t\t\tTestBracket(stopwatch1, stdVectorUint64);\n\t\t\tTestBracket(stopwatch2, eaVectorUint64);\n\n\t\t\tif(i == 1)\n\t\t\t\tBenchmark::AddResult(\"vector/operator[]\", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());\n\n\n\t\t\t///////////////////////////////\n\t\t\t// Test iteration via find().\n\t\t\t///////////////////////////////\n\n\t\t\tTestFind(stopwatch1, stdVectorUint64);\n\t\t\tTestFind(stopwatch2, eaVectorUint64);\n\t\t\tTestFind(stopwatch1, stdVectorUint64);\n\t\t\tTestFind(stopwatch2, eaVectorUint64);\n\n\t\t\tif(i == 1)\n\t\t\t\tBenchmark::AddResult(\"vector/iteration\", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());\n\n\n\t\t\t///////////////////////////////\n\t\t\t// Test sort\n\t\t\t///////////////////////////////\n\n\t\t\t// Currently VC++ complains about our sort function decrementing std::iterator that is already at begin(). In the strictest sense,\n\t\t\t// that's a valid complaint, but we aren't testing std STL here. We will want to revise our sort function eventually.\n\t\t\t#if !defined(_MSC_VER) || !defined(_ITERATOR_DEBUG_LEVEL) || (_ITERATOR_DEBUG_LEVEL < 2)\n\t\t\t\tTestSort(stopwatch1, stdVectorUint64);\n\t\t\t\tTestSort(stopwatch2, eaVectorUint64);\n\n\t\t\t\tif(i == 1)\n\t\t\t\t\tBenchmark::AddResult(\"vector/sort\", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());\n\t\t\t#endif\n\n\t\t\t///////////////////////////////\n\t\t\t// Test insert\n\t\t\t///////////////////////////////\n\n\t\t\tTestInsert(stopwatch1, stdVectorUint64);\n\t\t\tTestInsert(stopwatch2, eaVectorUint64);\n\n\t\t\tif(i == 1)\n\t\t\t\tBenchmark::AddResult(\"vector/insert\", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());\n\n\n\t\t\t///////////////////////////////\n\t\t\t// Test erase\n\t\t\t///////////////////////////////\n\n\t\t\tTestErase(stopwatch1, stdVectorUint64);\n\t\t\tTestErase(stopwatch2, eaVectorUint64);\n\n\t\t\tif(i == 1)\n\t\t\t\tBenchmark::AddResult(\"vector/erase\", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());\n\n\n\t\t\t///////////////////////////////////////////\n\t\t\t// Test move of MovableType\n\t\t\t// Should be much faster with C++11 move.\n\t\t\t///////////////////////////////////////////\n\n\t\t\tstd::vector<MovableType> stdVectorMovableType;\n\t\t\teastl::vector<MovableType> eaVectorMovableType;\n\n\t\t\tTestMoveReallocate(stopwatch1, stdVectorMovableType);\n\t\t\tTestMoveReallocate(stopwatch2, eaVectorMovableType);\n\n\t\t\tif(i == 1)\n\t\t\t\tBenchmark::AddResult(\"vector/reallocate\", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());\n\n\n\t\t\tTestMoveErase(stopwatch1, stdVectorMovableType);\n\t\t\tTestMoveErase(stopwatch2, eaVectorMovableType);\n\n\t\t\tif(i == 1)\n\t\t\t\tBenchmark::AddResult(\"vector/erase\", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());\n\n\n\t\t\t///////////////////////////////////////////\n\t\t\t// Test move of AutoRefCount\n\t\t\t// Should be much faster with C++11 
move.\n\t\t\t///////////////////////////////////////////\n\n\t\t\tstd::vector<AutoRefCount<RefCounted> > stdVectorAutoRefCount;\n\t\t\teastl::vector<AutoRefCount<RefCounted> > eaVectorAutoRefCount;\n\n\t\t\tfor(size_t a = 0; a < 2048; a++)\n\t\t\t{\n\t\t\t\tstdVectorAutoRefCount.push_back(AutoRefCount<RefCounted>(new RefCounted));\n\t\t\t\teaVectorAutoRefCount.push_back(AutoRefCount<RefCounted>(new RefCounted));\n\t\t\t}\n\n\t\t\tRefCounted::msAddRefCount = 0;\n\t\t\tRefCounted::msReleaseCount = 0;\n\t\t\tTestMoveErase(stopwatch1, stdVectorAutoRefCount);\n\t\t\tEASTLTest_Printf(\"vector/erase std counts: %d %d\\n\", RefCounted::msAddRefCount, RefCounted::msReleaseCount);\n\n\t\t\tRefCounted::msAddRefCount = 0;\n\t\t\tRefCounted::msReleaseCount = 0;\n\t\t\tTestMoveErase(stopwatch2, eaVectorAutoRefCount);\n\t\t\tEASTLTest_Printf(\"vector/erase EA counts: %d %d\\n\", RefCounted::msAddRefCount, RefCounted::msReleaseCount);\n\n\t\t\tif(i == 1)\n\t\t\t\tBenchmark::AddResult(\"vector/erase\", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());\n\t\t}\n\t}\n}", "docstring": "// namespace", "url": "https://github.com/vitoplantamura/BugChecker/blob/8b81e76efe457b59be3a6e752efd43916ba0cabb/dependencies/EASTL/benchmark/source/BenchmarkVector.cpp#L301-L440", "sha": "8b81e76efe457b59be3a6e752efd43916ba0cabb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Solution.wordBreak", "code": "bool wordBreak(string s, vector < string >& wordDict) {\n        // Base case: an empty string can always be segmented (as no word is needed).\n        if (s.empty()) return true;\n        \n        // If the result for the current 's' is already computed, return it to avoid recomputation.\n        if (mp.find(s) != mp.end()) return mp[s];\n        \n        // Get the size of the word dictionary to use it in the loop.\n        int n = wordDict.size();\n        \n        // Loop through the words in the dictionary to find a match with the beginning of 's'.\n        for (int i = 0; i < n; i++) {\n            // Check if the current word from the dictionary matches the start of 's'.\n            if (wordDict[i] == s.substr(0, wordDict[i].length())) {\n                // If there is a match, recursively check the rest of the string after removing the matched word.\n                bool check = wordBreak(s.substr(wordDict[i].length()), wordDict);\n                \n                // If the rest of the string can be segmented, update the result for the current 's' to true.\n                if (check)\n                    return mp[s] = true;\n            }\n        }\n        \n        // If no match is found, update the result for the current 's' to false.\n        return mp[s] = false;\n    }", "docstring": "// The wordBreak function takes a string 's' and a vector of strings 'wordDict'.\n// It returns true if the string 's' can be segmented into words from 'wordDict', false otherwise.", "url": "https://github.com/7oSkaaa/LeetCode_DailyChallenge_2023/blob/e00960b9f5cbfaf813572c366e16d4fef5fa6b8d/08- August/04- Word Break/04- Word Break (Ahmed Hossam).cpp#L9-L34", "sha": "e00960b9f5cbfaf813572c366e16d4fef5fa6b8d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MurmurHash3_x86_128", "code": "void MurmurHash3_x86_128( const void* key, const int len,\n\tuint32_t seed, void* out )\n{\n\tconst uint8_t* data = (const uint8_t*)key;\n\tconst int nblocks = len / 16;\n\n\tuint32_t h1 = seed;\n\tuint32_t h2 = seed;\n\tuint32_t h3 = seed;\n\tuint32_t h4 = seed;\n\n\tconst uint32_t c1 = 0x239b961b;\n\tconst uint32_t c2 = 0xab0e9789;\n\tconst uint32_t c3 = 0x38b34ae5;\n\tconst uint32_t c4 = 0xa1e38b93;\n\n\t//----------\n\t// body\n\n\tconst uint32_t* blocks = (const uint32_t*)( data + nblocks * 16 );\n\n\tfor( int i = -nblocks; i; i++ 
)\n\t{\n\t\tuint32_t k1 = getblock32( blocks, i * 4 + 0 );\n\t\tuint32_t k2 = getblock32( blocks, i * 4 + 1 );\n\t\tuint32_t k3 = getblock32( blocks, i * 4 + 2 );\n\t\tuint32_t k4 = getblock32( blocks, i * 4 + 3 );\n\n\t\tk1 *= c1; k1 = ROTL32( k1, 15 ); k1 *= c2; h1 ^= k1;\n\n\t\th1 = ROTL32( h1, 19 ); h1 += h2; h1 = h1 * 5 + 0x561ccd1b;\n\n\t\tk2 *= c2; k2 = ROTL32( k2, 16 ); k2 *= c3; h2 ^= k2;\n\n\t\th2 = ROTL32( h2, 17 ); h2 += h3; h2 = h2 * 5 + 0x0bcaa747;\n\n\t\tk3 *= c3; k3 = ROTL32( k3, 17 ); k3 *= c4; h3 ^= k3;\n\n\t\th3 = ROTL32( h3, 15 ); h3 += h4; h3 = h3 * 5 + 0x96cd1c35;\n\n\t\tk4 *= c4; k4 = ROTL32( k4, 18 ); k4 *= c1; h4 ^= k4;\n\n\t\th4 = ROTL32( h4, 13 ); h4 += h1; h4 = h4 * 5 + 0x32ac3b17;\n\t}\n\n\t//----------\n\t// tail\n\n\tconst uint8_t* tail = (const uint8_t*)( data + nblocks * 16 );\n\n\tuint32_t k1 = 0;\n\tuint32_t k2 = 0;\n\tuint32_t k3 = 0;\n\tuint32_t k4 = 0;\n\n\tswitch( len & 15 )\n\t{\n\tcase 15: k4 ^= tail[ 14 ] << 16;\n\tcase 14: k4 ^= tail[ 13 ] << 8;\n\tcase 13: k4 ^= tail[ 12 ] << 0;\n\t\tk4 *= c4; k4 = ROTL32( k4, 18 ); k4 *= c1; h4 ^= k4;\n\n\tcase 12: k3 ^= tail[ 11 ] << 24;\n\tcase 11: k3 ^= tail[ 10 ] << 16;\n\tcase 10: k3 ^= tail[ 9 ] << 8;\n\tcase 9: k3 ^= tail[ 8 ] << 0;\n\t\tk3 *= c3; k3 = ROTL32( k3, 17 ); k3 *= c4; h3 ^= k3;\n\n\tcase 8: k2 ^= tail[ 7 ] << 24;\n\tcase 7: k2 ^= tail[ 6 ] << 16;\n\tcase 6: k2 ^= tail[ 5 ] << 8;\n\tcase 5: k2 ^= tail[ 4 ] << 0;\n\t\tk2 *= c2; k2 = ROTL32( k2, 16 ); k2 *= c3; h2 ^= k2;\n\n\tcase 4: k1 ^= tail[ 3 ] << 24;\n\tcase 3: k1 ^= tail[ 2 ] << 16;\n\tcase 2: k1 ^= tail[ 1 ] << 8;\n\tcase 1: k1 ^= tail[ 0 ] << 0;\n\t\tk1 *= c1; k1 = ROTL32( k1, 15 ); k1 *= c2; h1 ^= k1;\n\t};\n\n\t//----------\n\t// finalization\n\n\th1 ^= len; h2 ^= len; h3 ^= len; h4 ^= len;\n\n\th1 += h2; h1 += h3; h1 += h4;\n\th2 += h1; h3 += h1; h4 += h1;\n\n\th1 = fmix32( h1 );\n\th2 = fmix32( h2 );\n\th3 = fmix32( h3 );\n\th4 = fmix32( h4 );\n\n\th1 += h2; h1 += h3; h1 += h4;\n\th2 += h1; h3 += h1; h4 += h1;\n\n\t( (uint32_t*)out )[ 0 ] = h1;\n\t( (uint32_t*)out )[ 1 ] = h2;\n\t( (uint32_t*)out )[ 2 ] = h3;\n\t( (uint32_t*)out )[ 3 ] = h4;\n}", "docstring": "//-----------------------------------------------------------------------------", "url": "https://github.com/Const-me/Whisper/blob/306aadd1fce4b168cd38659236f4ba7c1603cebd/Whisper/Utils/MurmurHash3.cpp#L150-L251", "sha": "306aadd1fce4b168cd38659236f4ba7c1603cebd"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ImGui_ImplWin32_VirtualKeyToImGuiKey", "code": "static ImGuiKey ImGui_ImplWin32_VirtualKeyToImGuiKey(WPARAM wParam)\n{\n switch (wParam)\n {\n case VK_TAB: return ImGuiKey_Tab;\n case VK_LEFT: return ImGuiKey_LeftArrow;\n case VK_RIGHT: return ImGuiKey_RightArrow;\n case VK_UP: return ImGuiKey_UpArrow;\n case VK_DOWN: return ImGuiKey_DownArrow;\n case VK_PRIOR: return ImGuiKey_PageUp;\n case VK_NEXT: return ImGuiKey_PageDown;\n case VK_HOME: return ImGuiKey_Home;\n case VK_END: return ImGuiKey_End;\n case VK_INSERT: return ImGuiKey_Insert;\n case VK_DELETE: return ImGuiKey_Delete;\n case VK_BACK: return ImGuiKey_Backspace;\n case VK_SPACE: return ImGuiKey_Space;\n case VK_RETURN: return ImGuiKey_Enter;\n case VK_ESCAPE: return ImGuiKey_Escape;\n case VK_OEM_7: return ImGuiKey_Apostrophe;\n case VK_OEM_COMMA: return ImGuiKey_Comma;\n case VK_OEM_MINUS: return ImGuiKey_Minus;\n case VK_OEM_PERIOD: return ImGuiKey_Period;\n case VK_OEM_2: return ImGuiKey_Slash;\n case VK_OEM_1: return ImGuiKey_Semicolon;\n case VK_OEM_PLUS: return 
ImGuiKey_Equal;\n case VK_OEM_4: return ImGuiKey_LeftBracket;\n case VK_OEM_5: return ImGuiKey_Backslash;\n case VK_OEM_6: return ImGuiKey_RightBracket;\n case VK_OEM_3: return ImGuiKey_GraveAccent;\n case VK_CAPITAL: return ImGuiKey_CapsLock;\n case VK_SCROLL: return ImGuiKey_ScrollLock;\n case VK_NUMLOCK: return ImGuiKey_NumLock;\n case VK_SNAPSHOT: return ImGuiKey_PrintScreen;\n case VK_PAUSE: return ImGuiKey_Pause;\n case VK_NUMPAD0: return ImGuiKey_Keypad0;\n case VK_NUMPAD1: return ImGuiKey_Keypad1;\n case VK_NUMPAD2: return ImGuiKey_Keypad2;\n case VK_NUMPAD3: return ImGuiKey_Keypad3;\n case VK_NUMPAD4: return ImGuiKey_Keypad4;\n case VK_NUMPAD5: return ImGuiKey_Keypad5;\n case VK_NUMPAD6: return ImGuiKey_Keypad6;\n case VK_NUMPAD7: return ImGuiKey_Keypad7;\n case VK_NUMPAD8: return ImGuiKey_Keypad8;\n case VK_NUMPAD9: return ImGuiKey_Keypad9;\n case VK_DECIMAL: return ImGuiKey_KeypadDecimal;\n case VK_DIVIDE: return ImGuiKey_KeypadDivide;\n case VK_MULTIPLY: return ImGuiKey_KeypadMultiply;\n case VK_SUBTRACT: return ImGuiKey_KeypadSubtract;\n case VK_ADD: return ImGuiKey_KeypadAdd;\n case IM_VK_KEYPAD_ENTER: return ImGuiKey_KeypadEnter;\n case VK_LSHIFT: return ImGuiKey_LeftShift;\n case VK_LCONTROL: return ImGuiKey_LeftCtrl;\n case VK_LMENU: return ImGuiKey_LeftAlt;\n case VK_LWIN: return ImGuiKey_LeftSuper;\n case VK_RSHIFT: return ImGuiKey_RightShift;\n case VK_RCONTROL: return ImGuiKey_RightCtrl;\n case VK_RMENU: return ImGuiKey_RightAlt;\n case VK_RWIN: return ImGuiKey_RightSuper;\n case VK_APPS: return ImGuiKey_Menu;\n case '0': return ImGuiKey_0;\n case '1': return ImGuiKey_1;\n case '2': return ImGuiKey_2;\n case '3': return ImGuiKey_3;\n case '4': return ImGuiKey_4;\n case '5': return ImGuiKey_5;\n case '6': return ImGuiKey_6;\n case '7': return ImGuiKey_7;\n case '8': return ImGuiKey_8;\n case '9': return ImGuiKey_9;\n case 'A': return ImGuiKey_A;\n case 'B': return ImGuiKey_B;\n case 'C': return ImGuiKey_C;\n case 'D': return ImGuiKey_D;\n case 'E': return ImGuiKey_E;\n case 'F': return ImGuiKey_F;\n case 'G': return ImGuiKey_G;\n case 'H': return ImGuiKey_H;\n case 'I': return ImGuiKey_I;\n case 'J': return ImGuiKey_J;\n case 'K': return ImGuiKey_K;\n case 'L': return ImGuiKey_L;\n case 'M': return ImGuiKey_M;\n case 'N': return ImGuiKey_N;\n case 'O': return ImGuiKey_O;\n case 'P': return ImGuiKey_P;\n case 'Q': return ImGuiKey_Q;\n case 'R': return ImGuiKey_R;\n case 'S': return ImGuiKey_S;\n case 'T': return ImGuiKey_T;\n case 'U': return ImGuiKey_U;\n case 'V': return ImGuiKey_V;\n case 'W': return ImGuiKey_W;\n case 'X': return ImGuiKey_X;\n case 'Y': return ImGuiKey_Y;\n case 'Z': return ImGuiKey_Z;\n case VK_F1: return ImGuiKey_F1;\n case VK_F2: return ImGuiKey_F2;\n case VK_F3: return ImGuiKey_F3;\n case VK_F4: return ImGuiKey_F4;\n case VK_F5: return ImGuiKey_F5;\n case VK_F6: return ImGuiKey_F6;\n case VK_F7: return ImGuiKey_F7;\n case VK_F8: return ImGuiKey_F8;\n case VK_F9: return ImGuiKey_F9;\n case VK_F10: return ImGuiKey_F10;\n case VK_F11: return ImGuiKey_F11;\n case VK_F12: return ImGuiKey_F12;\n default: return ImGuiKey_None;\n }\n}", "docstring": "// Map VK_xxx to ImGuiKey_xxx.", "url": "https://github.com/ddf8196/BetterRenderDragon/blob/fc4e663813e778365657a8a8aa7ad3f7c6559016/include/imgui/backends/imgui_impl_win32.cpp#L393-L503", "sha": "fc4e663813e778365657a8a8aa7ad3f7c6559016"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MouseCursor::updateMouseClipping", "code": "void 
MouseCursor::updateMouseClipping()\n{\n\tBW_GUARD;\n\tif (SimpleGUI::pInstance() != NULL)\n\t{\n\t\tclipCursorToWindow( SimpleGUI::instance().mouseCursor().clipped() );\n\t}\n}", "docstring": "/**\n *\tDoes a per-frame update of the mouse clipping. It is done per frame rather\n *\tthan just clipping/unclipping in setFocus because it allows nicer behaviour\n *\twhen refocusing the window with the mouse while the cursor is clicked.\n */\n/*static*/", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/ashes/mouse_cursor.cpp", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SimpleGUI::hwnd", "code": "void SimpleGUI::hwnd( void * h )\n{\n\thwnd_ = h;\n}", "docstring": "/**\n *\tThis method sets the HWND for the main application window.\n *\tSimpleGUI needs this only if the mouse cursor is to be used.\n *\n *\t@param h\tThe HWND for the application\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/ashes/simple_gui.cpp#L370-L373", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SimpleGUIComponent::handleDropEvent", "code": "bool SimpleGUIComponent::handleDropEvent( SimpleGUIComponent * dragged, \n\t\tconst KeyEvent & event )\n{\n\tBW_GUARD;\n\treturn this->invokeMouseEventHandler( \n\t\tthis->pScriptObject_.getObject(), \n\t\t\"handleDropEvent\", event.cursorPosition(), dragged, \n\t\t\"SimpleGUIComponent::handleDropEvent: \",\n\t\t\"EventsSimpleGUIComponent handleDropEvent retval\" );\n}", "docstring": "/*~ function SimpleGUIComponent.handleDropEvent\n *\t@components{ client, tools }\n *\n *\tThis event handler is triggered a dragged component is dropped overa drop \n *\taccepting component. To have this handler triggered, a component must have \n *\tdropFocus enabled.\n *\n *\t@param\tcomponent\tThe drop target component.\n *\t@param\tposition\tmouse position, 2-tuple (x, y).\n *\t@param\tdropped\t\tThe dragged component being dropped.\n *\n *\t@return\tthe return value is always ignored. 
\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/ashes/simple_gui_component.cpp#L4220-L4229", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ChunkItemBase::addBorrower", "code": "void ChunkItemBase::addBorrower( Chunk* pChunk )\n{\n\tborrowers_.insert( pChunk );\n\n#if UMBRA_ENABLE\n\tif (pUmbraDrawItem_)\n\t{\n\t\tif (this->chunk()->getUmbraCell() == NULL &&\n\t\t\tpChunk->getUmbraCell() != NULL)\n\t\t{\n\t\t\tpUmbraDrawItem_->updateCell( pChunk->getUmbraCell() );\n\t\t}\n\t}\n#endif // UMBRA_ENABLE\n}", "docstring": "// MF_SERVER\n/**\n *\tThis method adds a chunk as a borrower of this item\n *\twhat this means is that the ChunkItem overlaps this chunk\n *\tas well, which means we need to render as part of this chunk\n *\tif it exists in a different umbra cell to our own cell\n *\t@param pChunk the chunk that is borrowing us\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/chunk/chunk_item.cpp#L283-L297", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UmbraChunkItem::init", "code": "void UmbraChunkItem::init( ChunkItem* pItem, UmbraObjectProxyPtr pObject, const Matrix& transform, Umbra::OB::Cell* pCell )\n{\n\tpItem_ = pItem;\n\n\tpObject_ = pObject;\n\tpObject_->object()->setCell( pCell );\n\tpObject_->object()->setObjectToCellMatrix( (const Umbra::Matrix4x4&)transform );\n\tpObject_->object()->setUserPointer( (void*)this );\n\tpObject_->object()->setBitmask( ChunkUmbra::SCENE_OBJECT );\n}", "docstring": "/**\n *\tThis method inits the UmbraChunkItem\n *\tIt uses the passed in umbra object\n *\t@param pItem the chunk item to use\n *\t@param pObject the umbra ubject to use\n *\t@param transform the transform of the bounding box\n *\t@param pCell the umbra cell to place this item in\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/chunk/umbra_chunk_item.cpp#L158-L167", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AssetList::empty", "code": "bool AssetList::empty() const\n{\n\treturn entries_.empty();\n}", "docstring": "// ----------------------------------------------------------------------------", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/compiled_space/asset_list.cpp#L138-L141", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BinaryFormat::BinaryFormat", "code": "BinaryFormat::BinaryFormat() : \n\tpHeader_(NULL),\n\tpSectionHeaders_(NULL)\n{\n\tmemset( &mappedSections_, 0, sizeof(mappedSections_) );\n}", "docstring": "// ----------------------------------------------------------------------------\n// ----------------------------------------------------------------------------", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/compiled_space/binary_format.cpp#L142-L147", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"ParticleSystemWriter::addFromChunkParticles", "code": "void ParticleSystemWriter::addFromChunkParticles(\n\tconst DataSectionPtr& pDS,\n\tconst Matrix& chunkTransform,\n\tStringTableWriter& stringTable,\n\tAssetListWriter& assetList )\n{\n\tBW::string resourceID = pDS->readString( \"resource\" );\n\tif (resourceID.empty())\n\t{\n\t\treturn;\n\t}\n\n\tif (resourceID.find( '/' ) == BW::string::npos)\n\t{\n\t\tresourceID = \"particles/\" + resourceID;\n\t}\n\n\tDataSectionPtr resourceDS = BWResource::openSection( resourceID );\n\tif (!resourceDS)\n\t{\n\t\treturn;\n\t}\n\n\t// ok, we're committed to loading now.\n\tParticleSystemTypes::ParticleSystem data;\n\tmemset( &data, 0, sizeof(data) );\n\n\tdata.resourceID_ = assetList.addAsset(\n\t\tCompiledSpace::AssetListTypes::ASSET_TYPE_DATASECTION,\n\t\tresourceID, stringTable );\n\tdata.seedTime_ = resourceDS->readFloat( \"seedTime\", 0.1f );\n\tbool isReflectionVisible = pDS->readBool( \"reflectionVisible\",\n\t\tfalse );\n\n\tif (isReflectionVisible)\n\t{\n\t\tdata.flags_ |= ParticleSystemTypes::FLAG_REFLECTION_VISIBLE;\n\t}\n\n\t// Transform\n\tdata.worldTransform_ = chunkTransform;\n\tdata.worldTransform_.preMultiply(\n\t\tpDS->readMatrix34( \"transform\", Matrix::identity ) );\n\n\tsystemData_.push_back( data );\n}", "docstring": "// ----------------------------------------------------------------------------", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/compiled_space/binary_writers/particle_system_writer.cpp#L71-L116", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ReplayController::handleFinal", "code": "void ReplayController::handleFinal()\n{\n\thandler_.onReplayControllerFinish( *this );\n}", "docstring": "/**\n *\tThis method handles the 'finish' replay message, indicating end of replay.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/connection/replay_controller.cpp#L1384-L1387", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Profiler::setNewHistory", "code": "bool Profiler::setNewHistory( const char * historyFileName, \n\t\t\t\t\t\t\tconst char * threadName, BW::string * msgString)\n{\n\tBW_GUARD;\n\tthis->setProfileMode( SORT_BY_NAME, false );\n\n\treturn csvOutputTask_->setNewHistory( historyFileName,\n\t\tthreadName, msgString );\n}", "docstring": "/**\n *\tSet up a new file for CSV profiling output.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/cstdmf/profiler.cpp#L2204-L2212", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "queueWrites", "code": "void queueWrites()\n\t{\n\t\tfor (uint32 i = 1; i <= max_; ++i)\n\t\t{\n\t\t\tpWriter_->queueWriteBlob( (const char *)&i, sizeof( i ), i );\n\t\t}\n\t}", "docstring": "/**\n\t *\tThis method queues the writes necessary to write out the integers.\n\t */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/cstdmf/unit_test/test_background_file_writer.cpp#L194-L200", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"PyModel::pySet_motors", "code": "int PyModel::pySet_motors( PyObject * value )\n{\n\tBW_GUARD;\n\t// first check arguments...\n\tif (!PySequence_Check( value ))\n\t{\n\t\tPyErr_SetString( PyExc_TypeError,\n\t\t\t\"Model.motors must be set to a sequence of Motors\" );\n\t\treturn -1;\n\t}\n\n\t// ... thoroughly\n\tbool bad = false;\n\tfor (int i = 0; i < PySequence_Size( value ) && !bad; i++)\n\t{\n\t\tPyObject * pTry = PySequence_GetItem( value, i );\n\n\t\tif (!Motor::Check( pTry ))\n\t\t{\n\t\t\tPyErr_Format( PyExc_TypeError, \"Element %d of sequence replacing \"\n\t\t\t\t\" Model.motors is not a Motor\", i );\n\t\t\tbad = true;\n\t\t}\n\t\telse if (((Motor*)pTry)->pOwner() != NULL)\n\t\t{\n\t\t\tPyErr_Format( PyExc_ValueError, \"Element %d of sequence replacing \"\n\t\t\t\t\"Model.motors is already attached to a Model\", i );\n\t\t\tbad = true;\n\t\t}\n\n\t\tPy_DECREF( pTry );\n\t}\n\tif (bad) return -1;\n\n\n\t// let old motors go\n\tfor (uint i = 0; i < motors_.size(); i++)\n\t{\n\t\tmotors_[i]->detach();\n\t\tPy_DECREF( motors_[i] );\n\t}\n\tmotors_.clear();\n\n\t// fit new ones\n\tfor (int i = 0; i < PySequence_Size( value ) && !bad; i++)\n\t{\n\t\tMotor * pMotor = (Motor*)PySequence_GetItem( value, i );\n\n\t\tpMotor->attach( this );\n\t\tmotors_.push_back( pMotor );\n\t\t// We keep the reference returned by PySequence_GetItem.\n\t}\n\n\treturn 0;\n}", "docstring": "/**\n *\tSet the sequence of motors\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/duplo/pymodel.cpp#L2247-L2301", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FixedDictDataType::getDefaultValue", "code": "bool FixedDictDataType::getDefaultValue( DataSink & output ) const\n{\n#if defined( SCRIPT_PYTHON )\n\tconst_cast(this)->initCustomClassImplOnDemand();\n#endif\n\n\tif (pDefaultSection_)\n\t{\n\t\treturn this->createFromSection( pDefaultSection_, output );\n\t}\n\telse if (allowNone_)\n\t{\n\t\treturn output.writeNone( /* isNone */ true );\n\t}\n\n\t// TODO: Better than this.\n\tScriptObject pDefault = this->createDefaultInstance();\n#if defined( SCRIPT_PYTHON )\n\tif (this->hasCustomClass())\n\t\tpDefault = this->createCustomClassFromInstance(\n\t\t\tstatic_cast(pDefault.get()) );\n#endif\n\n\t// Eww...\n\tScriptDataSink & scriptOutput = static_cast< ScriptDataSink & >( output );\n\treturn scriptOutput.write( pDefault );\n}", "docstring": "/**\n *\tOverrides the DataType method.\n *\n *\t@see DataType::getDefaultValue\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/entitydef/data_types/fixed_dict_data_type.cpp#L726-L752", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FixedDictDataType::attach", "code": "ScriptObject FixedDictDataType::attach( ScriptObject pObject,\n\tPropertyOwnerBase * pOwner, int ownerRef )\n{\n#if defined( SCRIPT_PYTHON )\n\t// if it's None and that's ok, just return that\n\tif (allowNone_ && pObject.isNone())\n\t{\n\t\treturn ScriptObject::none();\n\t}\n\n\tinitCustomClassImplOnDemand();\n\n\tif (this->hasCustomClass())\n\t{\n\t\t// First part of isSameType() check\n\t\tif (this->hasCustomIsSameType() && !this->isSameTypeCustom( pObject ))\n\t\t{\n\t\t\tERROR_MSG( \"FixedDictDataType::attach: \"\n\t\t\t\t\t\"Trying to attach an invalid custom 
type\\n\" );\n\t\t\treturn ScriptObject();\n\t\t}\n\n\t\t// See if they are referencing our PyFixedDictDataInstance\n\t\tScriptObject pDict = this->getDictFromCustomClass( pObject );\n\n\t\t// Second part of isSameType() check\n\t\tif (PyFixedDictDataInstance::isSameType( pDict, *this ))\n\t\t{\n\t\t\t// Yay! isSameType() == true\n\t\t\tPyFixedDictDataInstancePtr pInst( pDict );\n\t\t\tif (pInst->hasOwner())\n\t\t\t{\n\t\t\t\t// Create copy\n\t\t\t\tpInst = PyFixedDictDataInstancePtr(\n\t\t\t\t\tnew PyFixedDictDataInstance( this, *pInst ),\n\t\t\t\t\tPyFixedDictDataInstancePtr::FROM_NEW_REFERENCE );\n\t\t\t\tpInst->setOwner( pOwner, ownerRef );\n\t\t\t\treturn this->createCustomClassFromInstance( pInst.get() );\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tpInst->setOwner( pOwner, ownerRef );\n\t\t\t\treturn pObject;\n\t\t\t}\n\t\t}\n\t\telse\t// not referencing PyFixedDictDataInstance\n\t\t{\n\t\t\t// Third part of isSameType() check, for custom class without\n\t\t\t// isSameType() method and doesn't reference PyFixedDictDataInstance\n\t\t\tif (this->hasCustomIsSameType() ||\n\t\t\t\tthis->createInstanceFromMappingObj( pDict ))\n\t\t\t{\n\t\t\t\treturn pObject;\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\treturn ScriptObject();\n\t\t\t}\n\t\t}\n\t}\n\n\tPyFixedDictDataInstancePtr pInst;\n\n\t// it's easy if it's the right python + entitydef type\n\tif (PyFixedDictDataInstance::isSameType( pObject, *this ))\n\t{\n\t\tpInst = pObject;\n\t\tif (pInst->hasOwner())\n\t\t{\n\t\t\t// Create copy\n\t\t\tpInst = PyFixedDictDataInstancePtr(\n\t\t\t\tnew PyFixedDictDataInstance( this, *pInst ),\n\t\t\t\tPyFixedDictDataInstancePtr::FROM_NEW_REFERENCE );\n\t\t\t// note: up to caller to check that prop isn't being set back\n\t\t}\n\t}\n\telse\n\t{\n\t\tpInst = this->createInstanceFromMappingObj( pObject );\n\t}\n\n\tif (pInst)\n\t{\n\t\tpInst->setOwner( pOwner, ownerRef );\n\t}\n\n\treturn pInst;\n#else\n\treturn pObject;\n#endif\n}", "docstring": "/**\n *\tOverrides the DataType method.\n *\n *\t@see DataType::attach\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/entitydef/data_types/fixed_dict_data_type.cpp#L1016-L1106", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GizmoManager::click", "code": "bool GizmoManager::click()\n{\n\tBW_GUARD;\n\n\tif (intersectedGizmo_.hasObject())\n\t{\n\t\tintersectedGizmo_->click(lastWorldOrigin_, lastWorldRay_);\n\t\treturn true;\n\t}\n\treturn false;\n}", "docstring": "/**\n * This method should be called when the user clicks on a gizmo\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/gizmo/gizmo_manager.cpp#L261-L271", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OmniLight::OmniLight", "code": "OmniLight::OmniLight( const D3DXCOLOR& colour, const Vector3& position, float innerRadius, float outerRadius )\n: position_( position ),\n innerRadius_( innerRadius ),\n outerRadius_( outerRadius ),\n colour_( (const Colour&)colour ),\n priority_( 0 )\n#ifdef EDITOR_ENABLED\n ,multiplier_(1.f)\n#endif\n{\n\t// give the worldTransformed attributes default values.\n\tworldPosition_ = position;\n\tworldInnerRadius_= innerRadius;\n\tworldOuterRadius_= outerRadius;\n}", "docstring": "/**\n *\tConstructor\n */", "url": 
"https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/moo/omni_light.cpp#L37-L51", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DynamicShadow::recreateForD3DExDevice", "code": "bool DynamicShadow::recreateForD3DExDevice() const\n {\n return true;\n }", "docstring": "//----------------------------------------------------------------------------------------------", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/moo/dynamic_shadow.cpp#L1496-L1499", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HTTPHeaders::Impl::parseHeaderValue", "code": "bool HTTPHeaders::Impl::parseHeaderValue( const char ** pByteData, \n\t\tsize_t length, BW::string & headerValue )\n{\n\tconst char * byteData = *pByteData;\n\tconst char * cursor = byteData;\n\n\tconst char * headerValueChunkStart = cursor;\n\tBW::ostringstream headerValueStream;\n\tbool leadingLWS = true;\n\twhile (cursor < (byteData + length) &&\n\t\t\t!HTTPUtil::isCRLF( cursor ))\n\t{\n\t\tif (HTTPUtil::isLinearWhitespace( cursor,\n\t\t\t\tlength - (cursor - byteData) ))\n\t\t{\n\t\t\tif (!leadingLWS)\n\t\t\t{\n\t\t\t\t// Replace any non-leading LWS with a single space, as per\n\t\t\t\t// RFC2616 section 4.2.\n\t\t\t\theaderValueStream.write( headerValueChunkStart,\n\t\t\t\t\tcursor - headerValueChunkStart );\n\t\t\t\theaderValueStream.put( ' ' );\n\t\t\t}\n\n\t\t\tif (!HTTPUtil::skipLinearWhitespace( &cursor,\n\t\t\t\t\tlength - (cursor - byteData) ))\n\t\t\t{\n\t\t\t\treturn false;\n\t\t\t}\n\n\t\t\theaderValueChunkStart = cursor;\n\t\t}\n\t\telse if (*cursor == '\"')\n\t\t{\n\t\t\tBW::string quotedString;\n\t\t\tif (!HTTPUtil::parseQuotedString( &cursor,\n\t\t\t\t\tlength - (cursor - byteData ), quotedString ))\n\t\t\t{\n\t\t\t\treturn false;\n\t\t\t}\n\n\t\t\theaderValueStream << quotedString;\n\t\t\theaderValueChunkStart = cursor;\n\t\t}\n\t\telse if (HTTPUtil::isControlCharacter( *cursor ))\n\t\t{\n\t\t\treturn false;\n\t\t}\n\t\telse\n\t\t{\n\t\t\t++cursor;\n\t\t}\n\t\tleadingLWS = false;\n\t}\n\n\n\tif (cursor >= (byteData + length))\n\t{\n\t\treturn false;\n\t}\n\n\theaderValueStream.write( headerValueChunkStart,\n\t\tcursor - headerValueChunkStart );\n\n\theaderValueStream.str().swap( headerValue );\n\n\t*pByteData = cursor + 2;\n\treturn true;\n}", "docstring": "/**\n *\tThis method parses the header field value.\n *\n *\tAny non-leading linear whitespace will be replaced by a single space, as\n *\tper RFC2616. 
Leading linear whitespace will be ignored.\n *\n *\t@param pByteData \tThe byte pointer, moved on success to past the end of\n *\t\t\t\t\t\tthe terminating CRLF.\n *\t@param length \t\tThe length of the byte data.\n *\t@pawram headerValue The output parsed header value.\n *\n *\t@return \t\t\tTrue on success, false otherwise.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/network/http_messages.cpp#L699-L767", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "operator delete", "code": "void operator delete( void * ptr )\n\t\t{ bw_free( ptr ); }", "docstring": "/**\n\t *\tThe overloaded delete operator to free the memory using the same\n\t *\tmalloc/free methods\n\t */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/network/logger_endpoint.cpp#L161-L162", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TCPChannel::writeFrom", "code": "bool TCPChannel::writeFrom( BinaryIStream & input, bool shouldCork )\n{\n\tint numToSend = input.remainingLength();\n\n\tif (pSendBuffer_)\n\t{\n\t\t// We're already waiting for the socket to become available for\n\t\t// writing, add it to the end.\n\t\tpSendBuffer_->transfer( input, numToSend );\n\t\treturn true;\n\t}\n\n\tint sendResult = this->basicSend( input.retrieve( 0 ), numToSend,\n\t\tshouldCork );\n\n\tif ((sendResult == -1) && !isErrnoIgnorable())\n\t{\n\t\tNOTICE_MSG( \"TCPChannel::writeFrom( %s ): \"\n\t\t\t\t\"Write error, destroying; error: %s\\n\",\n\t\t\tthis->c_str(), lastNetworkError() );\n\n\t\tinput.finish();\n\n\t\tthis->destroy();\n\t\treturn false;\n\t}\n\telse if (sendResult == -1)\n\t{\n\t\tsendResult = 0;\n\t}\n\n\tinput.retrieve( sendResult );\n\n\tif (sendResult < numToSend)\n\t{\n\t\t// Got short count when sending. 
Add the rest to the send buffer,\n\t\t// creating it and registering the socket for write events if\n\t\t// necessary.\n\n\t\tconst uint8 * leftOverData =\n\t\t\treinterpret_cast< const uint8 * >( input.retrieve(\n\t\t\t\tnumToSend - sendResult ) );\n\n\t\tif (!pSendBuffer_)\n\t\t{\n\t\t\tpSendBuffer_ = new MemoryOStream( this->maxSegmentSize() );\n\n\t\t\tthis->dispatcher().registerWriteFileDescriptor(\n\t\t\t\tpEndpoint_->fileno(),\n\t\t\t\tpSendWaiter_,\n\t\t\t\t\"TCPChannel\" );\n\t\t}\n\n\t\tpSendBuffer_->addBlob( leftOverData, numToSend - sendResult );\n\t}\n\n\treturn true;\n}", "docstring": "/**\n *\tThis method writes the given input data to the channel.\n *\n *\t@param input \t\tThe input data stream.\n *\t@param shouldCork \tWhether we should send any corked data and this data\n *\t\t\t\t\t\tnow, or cork the data for sending later.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/network/tcp_channel.cpp#L707-L764", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "swap", "code": "void swap( UDPBundleProcessor::iterator & a, UDPBundleProcessor::iterator & b )\n{\n\tusing std::swap;\n\n\tswap( a.cursor_, b.cursor_ );\n\tswap( a.isUnpacked_, b.isUnpacked_ );\n\tswap( a.bodyEndOffset_, b.bodyEndOffset_ );\n\tswap( a.offset_, b.offset_ );\n\tswap( a.dataOffset_, b.dataOffset_ );\n\tswap( a.dataLength_, b.dataLength_ );\n\tswap( a.dataBuffer_, b.dataBuffer_ );\n\tswap( a.nextRequestOffset_, b.nextRequestOffset_ );\n\tswap( a.curHeader_, b.curHeader_ );\n\tswap( a.updatedIE_, b.updatedIE_ );\n}", "docstring": "/**\n * This function swaps the iterators.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/network/udp_bundle_processor.cpp#L623-L637", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SplatPSA::lateExecute", "code": "void SplatPSA::lateExecute( ParticleSystem &particleSystem, float dTime )\n{\n\tBW_GUARD_PROFILER( SplatPSA_execute );\n\n\tconst SourcePSA * pSource = static_cast< const SourcePSA * >(\n\t\t&*particleSystem.pAction( PSA_SOURCE_TYPE_ID ) );\n\n\tif (!pSource)\n\t{\n\t\treturn;\n\t}\n\n\tconst RompColliderPtr pGS = pSource->groundSpecifier();\n\tif (!pGS)\n\t{\n\t\treturn;\n\t}\n\n\tuint64 tend = timestamp() + stampsPerSecond() / 2000;\n\n\tParticles::iterator it = particleSystem.begin();\n\tParticles::iterator end = particleSystem.end();\n\n\tWorldTriangle tri;\n\n\t//bust out of the loop if we take more than 0.5 msec\n\twhile (it != particleSystem.end() && timestamp() < tend)\n\t{\n\t\tParticle & particle = *it;\n\n\t\tif (!particle.isAlive())\n\t\t{\t\t\n\t\t\tcontinue;\n\t\t}\n\n\t\t//note - particles get moved after actions.\n\t\tVector3 velocity;\n\t\tparticle.getVelocity( velocity );\n\t\tVector3 newPos;\n\t\tparticleSystem.predictPosition( particle, dTime, newPos );\n\t\tfloat tValue = pGS->collide( particle.position(), newPos, tri );\n\t\tif (tValue >= 0.f && tValue <= 1.f)\n\t\t{\n#ifndef EDITOR_ENABLED\n\t\t\ttri.bounce( velocity, 1.f );\n\t\t\tparticle.setVelocity( velocity );\n\n\t\t\tif ( callback_ )\n\t\t\t{\n\t\t\t\tPyObject * pFn = PyObject_GetAttrString(\n\t\t\t\t\t&*callback_, \"onSplat\" );\n\t\t\t\tPyObject * pTuple = PyTuple_New( 3 );\n\t\t\t\tVector3 collidePos( particle.position() * tValue 
);\n\t\t\t\tcollidePos += (newPos * (1.f - tValue));\n\t\t\t\tPyTuple_SetItem( pTuple, 0, Script::getData( collidePos ) );\n\n\t\t\t\tVector3 velocity;\n\t\t\t\tparticle.getVelocity( velocity );\n\t\t\t\tPyTuple_SetItem( pTuple, 1, Script::getData( velocity ) );\n\n\t\t\t\tPyTuple_SetItem( pTuple, 2, Script::getData( \n\t\t\t\t\tColour::getVector4Normalised( particle.colour() ) ) );\n\t\t\t\tScript::callNextFrame( pFn, pTuple, \"SplatPSA::execute\" );\n\t\t\t}\n#endif\n\t\t\tit = particleSystem.removeParticle( it );\n\t\t}\n\t\telse\n\t\t{\n\t\t\t++it;\n\t\t}\n\t}\n}", "docstring": "/**\n *\tThis method executes the action for the given frame of time. The dTime\n *\tparameter is the time elapsed since the last call.\n *\n *\t@param particleSystem\tThe particle system on which to operate.\n *\t@param dTime\t\t\tElapsed time in seconds.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/particle/actions/splat_psa.cpp#L50-L122", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PyDataSection::s_length", "code": "Py_ssize_t PyDataSection::s_length( PyObject * self )\n{\n\treturn ((PyDataSection *) self)->length();\n}", "docstring": "/**\n *\tThis function returns the number of entities in the system.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/pyscript/py_data_section.cpp#L50-L53", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PackedSection::asUInt64", "code": "uint64 PackedSection::asUInt64( uint64 defaultVal )\n{\n\t// TODO: store 64-bit values as TYPE_INT? Murph suggested using 9 bytes for\n\t// uint64s as a special case.\n\tDataSectionPtr pDS = getXMLSection();\n\tpDS->setString( static_cast< DataSection * >( this )->asString() );\n\treturn pDS->asUInt64( defaultVal );\n}", "docstring": "/*\n *\tOverride from DataSection.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/resmgr/packed_section.cpp#L856-L863", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ZipFileSystem::LocalFile::diskName", "code": "BW::string\tZipFileSystem::LocalFile::diskName()\n{\n\treturn isFolder()? 
filename_ + \"/\": filename_;\n}", "docstring": "/**\n*\tThis method return the name of the file that should be written on the disk\n*\n*\t@return the name of the file\n*/", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/resmgr/zip_file_system.cpp#L1546-L1549", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GlyphReferenceHolder::report", "code": "void GlyphReferenceHolder::report() const\n{\n\tBW_GUARD;\n\tGlyphReferenceCountMap::const_iterator it = refCounts_.begin();\n\tGlyphReferenceCountMap::const_iterator en = refCounts_.end();\n\twhile (it != en)\n\t{\n\t\tDEBUG_MSG( \"%c - %d\\n\", it->first, it->second );\n\t\t++it;\n\t}\n}", "docstring": "/**\n *\tThis method lists all the reference counts as debug messages.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/romp/glyph_reference_holder.cpp#L84-L94", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "XConsole::print", "code": "void XConsole::print( const char* string )\n{\n\tBW_GUARD;\n\twchar_t converted[2] = {0, 0};\n\tsize_t numBytes = bw_utf8tow_incremental( string, converted );\n\twhile (numBytes > 0)\n\t{\n\t\tMF_ASSERT( !converted[1] );\n\t\tthis->print( converted );\n\t\tstring += numBytes;\n\t\tnumBytes = bw_utf8tow_incremental( string, converted );\n\t}\n}", "docstring": "/**\n *\tThis method prints the input string to the console at the current cursor\n *\tposition.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/romp/xconsole.cpp#L246-L258", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ListMultiProvider::~ListMultiProvider", "code": "ListMultiProvider::~ListMultiProvider()\n{\n\tBW_GUARD;\n}", "docstring": "/**\n *\tDestructor.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/lib/ual/list_multi_provider.cpp#L76-L79", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "onReplayDataFileReaderHeader", "code": "virtual bool onReplayDataFileReaderHeader( ReplayDataFileReader & reader,\n\t\tconst ReplayHeader & header, BW::string & errorString )\n\t{\n\t\theader_ = header;\n\t\treturn true;\n\t}", "docstring": "/* Override from IReplayDataFileReaderListener. 
*/", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/server/baseapp/unit_test/test_recording.cpp#L400-L405", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ClientCaller::ClientCaller", "code": "ClientCaller::ClientCaller( Entity & entity, \n\t\t\tconst MethodDescription & methodDescription,\n\t\t\tbool isForOwn, \n\t\t\tbool isForOthers, \n\t\t\tRecordingOption recordingOption,\n\t\t\tconst BW::string & destEntityTypeName,\n\t\t\tEntityID destID,\n\t\t\tPyTypeObject * pType /* = &ClientCaller::s_type_ */ ) :\n\t\tPyObjectPlus( pType ),\n\t\tentity_( entity ),\n\t\tmethodDescription_( methodDescription ),\n\t\tisForOwn_( isForOwn ),\n\t\tisForOthers_( isForOthers ),\n\t\tisExposedForReplay_( false ),\n\t\tdestID_( destID ),\n\t\tdestEntityTypeName_( destEntityTypeName )\n{\n\tPy_INCREF( &entity_ );\n\n\tswitch (recordingOption)\n\t{\n\tcase RECORDING_OPTION_METHOD_DEFAULT:\n\t\tif (isForOwn && !isForOthers)\n\t\t{\n\t\t\tisExposedForReplay_ = \n\t\t\t\t(methodDescription.replayExposureLevel() >= \n\t\t\t\t\tMethodDescription::REPLAY_EXPOSURE_LEVEL_ALL_CLIENTS);\n\t\t}\n\t\telse if (isForOthers)\n\t\t{\n\t\t\tisExposedForReplay_ = \n\t\t\t\t(methodDescription.replayExposureLevel() >= \n\t\t\t\t\tMethodDescription::REPLAY_EXPOSURE_LEVEL_OTHER_CLIENTS);\n\t\t}\n\t\tbreak;\n\n\tcase RECORDING_OPTION_RECORD_ONLY:\n\t\tisForOwn_ = false;\n\t\tisForOthers_ = false;\n\t\t// Fall through\n\tcase RECORDING_OPTION_RECORD:\n\t\tisExposedForReplay_ = true;\n\t\tbreak;\n\n\tcase RECORDING_OPTION_DO_NOT_RECORD:\n\tdefault:\n\t\t// Leave isExposedForReplay_ to false\n\t\tbreak;\n\t}\n}", "docstring": "/**\n *\tConstructor.\n *\n *\t@param methodDescription \tThe method description.\n *\t@param isForOwn \t\t\tWhether this will be sent to the entity's own\n *\t\t\t\t\t\t\t\tclient.\n *\t@param isForOthers \t\t\tWhether this will be sent to other clients that\n *\t\t\t\t\t\t\t\thave this entity in their AoI.\n *\t@param recordingOption\t\tSee RecordingOption enum.\n *\t@param destEntityTypeName \tThe name of the destination entity's type.\n *\t@param destID \t\t\t\tThe destination entity's ID.\n *\t@param pType \t\t\t\tThe type object to initialise this Python\n *\t\t\t\t\t\t\t\tobject to.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/server/cellapp/py_client.cpp#L99-L148", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Witness::deleteFromClient", "code": "void Witness::deleteFromClient( Mercury::Bundle & bundle,\n\tEntityCache * pCache )\n{\n\tpCache->clearRefresh();\n\n\tEntityID id = pCache->pEntity()->id();\n\n\tif (!pCache->isEnterPending())\n\t{\n\t\tpCache->addLeaveAoIMessage( bundle, id );\n\t}\n\n\tthis->onLeaveAoI( pCache, id );\n\n\t// Reset client related state\n\tpCache->onEntityRemovedFromClient();\n}", "docstring": "/**\n *\tThis method informs the client that an entity has left its AoI.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/server/cellapp/witness.cpp#L1922-L1938", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CellApp::onSignalled", "code": "void CellApp::onSignalled( int sigNum 
)\n{\n\tif (sigNum == SIGQUIT)\n\t{\n\t\t// Just print out some information, and pass it up to EntityApp to dump\n\t\t// core.\n\n\t\tERROR_MSG( \"CellApp::onSignalled: \"\n\t\t\t\t\"load = %f. emergencyThrottle = %f. \"\n\t\t\t\t\"Time since tick = %f seconds\\n\",\n\t\t\tthis->getLoad(), this->emergencyThrottle(),\n\t\t\tstampsToSeconds( timestamp() - this->lastGameTickTime() ) );\n\t}\n\n\tthis->EntityApp::onSignalled( sigNum );\n}", "docstring": "/**\n *\tSignal handler.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/server/cellapp/cellapp.cpp#L3606-L3621", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MainApp::addBotsWithName", "code": "void MainApp::addBotsWithName( PyObjectPtr pCredentialSequence )\n{\n\tif (!pCredentialSequence || pCredentialSequence == Py_None)\n\t{\n\t\tPyErr_SetString( PyExc_TypeError,\n\t\t\t\"Bots::addBotsWithName: Empty login info. \"\n\t\t\t\"Expecting a list of tuples containing username and password.\" );\n\t\treturn;\n\t}\n\n\tif (!PySequence_Check( pCredentialSequence.get() ))\n\t{\n\t\tPyErr_SetString( PyExc_TypeError, \"Bots::addBotsWithName: \"\n\t\t\t\"Expecting a list of tuples containing username and password.\" );\n\t\treturn;\n\t}\n\n\tPy_ssize_t numCredentials = PySequence_Size(\n\t\tpCredentialSequence.get() );\n\tfor (Py_ssize_t i = 0; i < numCredentials; ++i)\n\t{\n\t\tPyObject * pCredentials = PySequence_GetItem(\n\t\t\tpCredentialSequence.get(), i );\n\t\tif (!PyTuple_Check( pCredentials ) || PyTuple_Size( pCredentials ) != 2)\n\t\t{\n\t\t\tPyErr_Format( PyExc_TypeError,\n\t\t\t\t\"Bots::addBotsWithName: Argument list item %\" PRIzd \" must \"\n\t\t\t\t\"be tuple of two strings.\", i );\n\n\t\t\tPy_XDECREF( pCredentials );\n\t\t\treturn;\n\t\t}\n\n\t\tPyObject * pClientName = PySequence_GetItem( pCredentials, 0 );\n\t\tPyObject * pClientPassword = PySequence_GetItem( pCredentials, 1 );\n\n\t\tPy_DECREF( pCredentials );\n\n\t\tif (!PyString_Check( pClientName ) ||\n\t\t\t!PyString_Check( pClientPassword ))\n\t\t{\n\t\t\tPyErr_Format( PyExc_TypeError, \"Bots::addBotsWithName: \"\n\t\t\t\t\"Invalid credentials for element %\" PRIzd \". 
Expecting a tuple \"\n\t\t\t\t\"containing a username and password.\", i );\n\n\t\t\tPy_XDECREF( pClientName );\n\t\t\tPy_XDECREF( pClientPassword );\n\n\t\t\treturn;\n\t\t}\n\n\t\tthis->addBotWithName( BW::string( PyString_AsString( pClientName ) ),\n\t\t\tBW::string( PyString_AsString( pClientPassword ) ) );\n\n\t\tPy_DECREF( pClientName );\n\t\tPy_DECREF( pClientPassword );\n\t}\n}", "docstring": "/**\n *\tThis method adds a set of new bot client applications to the Bots process\n *\tusing a pregenerated list of usernames and passwords.\n *\n *\t@param pCredentialSequence The Python list containing tuples of\n *\t username/password pairs to be used in creating\n *\t the ClientApps.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/server/tools/bots/main_app.cpp#L441-L498", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "~BotAdder", "code": "virtual ~BotAdder()\n\t{\n\t\ttimer_.cancel();\n\t}", "docstring": "/**\n\t *\tDestructor.\n\t */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/server/tools/bots/main_app.cpp#L1208-L1211", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "InputOptions::setMipmapFilter", "code": "void InputOptions::setMipmapFilter(MipmapFilter filter)\n{\n\tm.mipmapFilter = filter;\n}", "docstring": "/// Set mipmap filter.", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/nvtt/src/nvtt/InputOptions.cpp#L278-L281", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "setNormalMap", "code": "void setNormalMap(nvtt::InputOptions & inputOptions)\n{\n\tinputOptions.setNormalMap(true);\n\tinputOptions.setConvertToNormalMap(false);\n\tinputOptions.setGamma(1.0f, 1.0f);\n\tinputOptions.setNormalizeMipmaps(true);\n}", "docstring": "// Set options for normal maps.", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/nvtt/src/nvtt/tools/benchmark.cpp#L59-L65", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "VisualProcessor::VisualProcessor", "code": "VisualProcessor::VisualProcessor( const BW::string & params ) :\n\tConverter( params )\n{\n\tBW_GUARD;\n}", "docstring": "/* construct a converter with parameters. 
*/", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/tools/asset_pipeline/converters/visual_processor/visual_processor.cpp#L46-L50", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Undo::updateActions", "code": "void Undo::updateActions( void )\n{\n\tif ( undoAction_ )\n \tundoAction_->Enabled = undoEnabled_;\n\n if ( redoAction_ )\n \tredoAction_->Enabled = redoEnabled_;\n}", "docstring": "//---------------------------------------------------------------------------", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/tools/common/undo.cpp#L477-L484", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CommonUtility::removePath", "code": "AnsiString __fastcall CommonUtility::removePath( AnsiString filename )\n{\n\tAnsiString name;\n int start, backSlashPos, forewardSlashPos, end;\n\n backSlashPos = filename.LastDelimiter( \"\\\\\" ) + 1;\n forewardSlashPos = filename.LastDelimiter( \"/\" ) + 1;\n\n if ( backSlashPos > forewardSlashPos )\n \tstart = backSlashPos;\n else\n \tstart = forewardSlashPos;\n\n end = filename.Length() + 1;\n\n if ( start < end )\n {\n\t\tfor ( int i = start; i < end; i++ )\n \t\tname += filename[i];\n }\n else\n \tname = filename;\n\n return name;\n}", "docstring": "//---------------------------------------------------------------------------", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/tools/common/common_utility.cpp#L199-L223", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LightingPlan::VirtualLight::drawDirectional", "code": "void LightingPlan::VirtualLight::drawDirectional()\n{\n}", "docstring": "/**\n * \tThis method draws a directional light, as a cylinder\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/tools/common/lighting_plan.cpp#L149-L151", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExpandSpaceDlg::validationChange", "code": "void ExpandSpaceDlg::validationChange( bool validationResult )\n{\n\tif ( validationResult )\n\t{\n\t\tbtnExpand_.EnableWindow( TRUE );\n\t}\n\telse\n\t{\n\t\tbtnExpand_.EnableWindow( FALSE );\n\t}\n}", "docstring": "/*virtual */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/tools/worldeditor/gui/dialogs/expand_space_dlg.cpp", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PageTerrainImport::UpdateState", "code": "void PageTerrainImport::UpdateState()\n{\n\tBW_GUARD;\n\n if (!pageReady_)\n InitPage();\n\n if (!HeightModule::hasStarted())\n return;\n\n HeightModule *hm = HeightModule::currentInstance();\n\n BOOL importing = hm->hasImportData();\n BOOL exporting = !importing;\n\n\t// Inform the height module about the mode and strengths:\n\tif (importing)\n\t\thm->mode(HeightModule::IMPORT_TERRAIN);\n\telse if (exporting)\n\t\thm->mode(HeightModule::EXPORT_TERRAIN);\n\n\t// Update subclassed controls:\n\texportBtn_ 
.EnableWindow(exporting);\n heightStrengthEdit_ .EnableWindow(importing);\n heightStrengthSlider_.EnableWindow(importing);\n modeCB_ .EnableWindow(importing);\n placeBtn_ .EnableWindow(importing);\n cancelBtn_ .EnableWindow(importing);\n}", "docstring": "/**\n *\tThis is called to update the state of the controls.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/tools/worldeditor/gui/pages/page_terrain_import.cpp#L909-L937", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SceneBrowserDlg::OnUpdateControls", "code": "LRESULT SceneBrowserDlg::OnUpdateControls( WPARAM wParam, LPARAM lParam )\n{\n\tBW_GUARD;\n\n\tstatic DogWatch dw( \"SceneBrowserTick\" );\n\tScopedDogWatch sdw( dw );\n\n\tuint64 startTick = timestamp();\n\n\tbool needsUpdateStatusBar = false;\n\n\tbool forceListTick = false;\n\tif (ItemInfoDB::instance().needsTick())\n\t{\n\t\t// The user must have changed something, try to spend some time updating\n\t\t// the item db, and make sure the list updated on the next frame.\n\t\tItemInfoDB::instance().tick( MAX_DB_MILLIS * MAX_DB_MILLIS_MULTIPLIER );\n\t\tforceListTick = true;\n\t}\n\n\tbool showWorkingAnim = ItemInfoDB::instance().hasChanged();\n\n\tuint64 timeSinceLastUpdate = \n\t\t(timestamp() - lastUpdate_) * 1000 / stampsPerSecond();\n\n\tif (forceListTick || list_.needsTick() || timeSinceLastUpdate > CHECK_UPDATE_LIST_MILLIS)\n\t{\n\t\t// Time to update the list.\n\t\tif (list_.tick( forceListTick ))\n\t\t{\n\t\t\tneedsUpdateStatusBar = true;\n\t\t\tshowWorkingAnim = true;\n\t\t}\n\t\tlastUpdate_ = timestamp();\n\t}\n\n\tif (showWorkingAnim)\n\t\tworkingAnim_.show();\n\telse\n\t\tworkingAnim_.hide();\n\n\tworkingAnim_.update();\n\n\tif (list_.hasSelectionChanged())\n\t{\n\t\tstatic DogWatch dw( \"SetAppSelection\" );\n\t\tScopedDogWatch sdw( dw );\n\n\t\t// Selection changed by the list after user interaction\n\t\tif (SceneBrowser::instance().callbackSelChange())\n\t\t{\n\t\t\tCWaitCursor wait;\n\t\t\t(*SceneBrowser::instance().callbackSelChange())(\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tlist_.selection() );\n\t\t}\n\n\t\tif (SceneBrowser::instance().callbackCurrentSel())\n\t\t{\n\t\t\tconst BW::vector & cbSelection =\n\t\t\t\t\t\t\t(*SceneBrowser::instance().callbackCurrentSel())();\n\t\t\tlastSelection_ = cbSelection;\n\t\t}\n\n\t\tneedsUpdateStatusBar = true;\n\t\tlist_.clearSelectionChanged();\n\t}\n\telse if (SceneBrowser::instance().callbackCurrentSel())\n\t{\n\t\tstatic DogWatch dw( \"GetAppSelection\" );\n\t\tScopedDogWatch sdw( dw );\n\n\t\t// Selection changed from the outside\n\t\tconst BW::vector & cbSelection =\n\t\t\t\t\t\t\t(*SceneBrowser::instance().callbackCurrentSel())();\n\t\tif (lastSelection_ != cbSelection)\n\t\t{\n\t\t\tlastSelection_ = cbSelection;\n\n\t\t\t// Make sure the list's and DB's items are to date.\n\t\t\tItemInfoDB::instance().tick();\n\t\t\tlist_.tick();\n\t\t\tlastUpdate_ = timestamp();\n\n\t\t\tlist_.selection( lastSelection_ );\n\t\t\tneedsUpdateStatusBar = true;\n\t\t}\n\t}\n\t\n\tif (needsUpdateStatusBar)\n\t{\n\t\tstatic DogWatch dw( \"UpdateStatusBar\" );\n\t\tScopedDogWatch sdw( dw );\n\n\t\tupdateStatusBar();\n\t}\n\n\t// Update the database if there's still remaining tick time. 
We do this to\n\t// reduce spikes in the frame rate.\n\tint ellapsedTickMillis =\n\t\t\tint( (timestamp() - startTick) * 1000 / stampsPerSecond() );\n\n\tint multiplier = std::min(\n\t\t\t1 + ItemInfoDB::instance().numPending() / MAX_DB_MILLIS_STEP,\n\t\t\tMAX_DB_MILLIS_MULTIPLIER );\n\tint maxMillis = MAX_DB_MILLIS * multiplier;\n\tint dbTickMaxMillis = std::max( 0, maxMillis - ellapsedTickMillis );\n\n\tItemInfoDB::instance().tick( dbTickMaxMillis );\n\n\treturn 0;\n}", "docstring": "/**\n *\tThis message is sent from the app once each frame.\n *\n *\t@param wParam\tunused.\n *\t@param lParam\tunused.\n *\t@return\t\tIgnored.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/tools/worldeditor/gui/scene_browser/scene_browser_dlg.cpp#L472-L582", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pinParameter", "code": "static float pinParameter(float data)\n{\n\tif (data < 0.0f) return 0.0f;\n\tif (data > 1.0f) return 1.0f;\n\treturn data;\n}", "docstring": "//airwindows likes to ignore this stuff. Make your own programs, and make a different plugin rather than\n//trying to do versioning and preventing people from using older versions. Maybe they like the old one!", "url": "https://github.com/baconpaul/airwin2rack/blob/ecda72c1ec5211885bf4f83e61abae80e0457cd3/src/autogen_airwin/Crystal.cpp#L48-L53", "sha": "ecda72c1ec5211885bf4f83e61abae80e0457cd3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LRConvolve::getEffectName", "code": "bool LRConvolve::getEffectName(char* name) {\n vst_strncpy(name, \"LRConvolve\", kVstMaxProductStrLen); return true;\n}", "docstring": "// 1 = yes, -1 = no, 0 = don't know", "url": "https://github.com/baconpaul/airwin2rack/blob/ecda72c1ec5211885bf4f83e61abae80e0457cd3/src/autogen_airwin/LRConvolve.cpp#L67-L69", "sha": "ecda72c1ec5211885bf4f83e61abae80e0457cd3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pinParameter", "code": "static float pinParameter(float data)\n{\n\tif (data < 0.0f) return 0.0f;\n\tif (data > 1.0f) return 1.0f;\n\treturn data;\n}", "docstring": "//airwindows likes to ignore this stuff. Make your own programs, and make a different plugin rather than\n//trying to do versioning and preventing people from using older versions. Maybe they like the old one!", "url": "https://github.com/baconpaul/airwin2rack/blob/ecda72c1ec5211885bf4f83e61abae80e0457cd3/src/autogen_airwin/TapeBias.cpp#L45-L50", "sha": "ecda72c1ec5211885bf4f83e61abae80e0457cd3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pinParameter", "code": "static float pinParameter(float data)\n{\n\tif (data < 0.0f) return 0.0f;\n\tif (data > 1.0f) return 1.0f;\n\treturn data;\n}", "docstring": "//airwindows likes to ignore this stuff. Make your own programs, and make a different plugin rather than\n//trying to do versioning and preventing people from using older versions. 
Maybe they like the old one!", "url": "https://github.com/baconpaul/airwin2rack/blob/ecda72c1ec5211885bf4f83e61abae80e0457cd3/src/autogen_airwin/PurestWarm.cpp#L42-L47", "sha": "ecda72c1ec5211885bf4f83e61abae80e0457cd3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BrightAmbience::getEffectName", "code": "bool BrightAmbience::getEffectName(char* name) {\n vst_strncpy(name, \"BrightAmbience\", kVstMaxProductStrLen); return true;\n}", "docstring": "// 1 = yes, -1 = no, 0 = don't know", "url": "https://github.com/baconpaul/airwin2rack/blob/ecda72c1ec5211885bf4f83e61abae80e0457cd3/src/autogen_airwin/BrightAmbience.cpp#L101-L103", "sha": "ecda72c1ec5211885bf4f83e61abae80e0457cd3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BezEQ::getEffectName", "code": "bool BezEQ::getEffectName(char* name) {\n vst_strncpy(name, \"BezEQ\", kVstMaxProductStrLen); return true;\n}", "docstring": "// 1 = yes, -1 = no, 0 = don't know", "url": "https://github.com/baconpaul/airwin2rack/blob/ecda72c1ec5211885bf4f83e61abae80e0457cd3/src/autogen_airwin/BezEQ.cpp#L115-L117", "sha": "ecda72c1ec5211885bf4f83e61abae80e0457cd3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "kdtreequerybox", "code": "ae_int_t kdtreequerybox(const kdtree &kdt,\n const real_1d_array &boxmin,\n const real_1d_array &boxmax,\n const xparams _xparams) {\n jmp_buf _break_jump;\n alglib_impl::ae_state _alglib_env_state;\n alglib_impl::ae_state_init(&_alglib_env_state);\n if (setjmp(_break_jump)) {\n#if !defined(AE_NO_EXCEPTIONS)\n _ALGLIB_CPP_EXCEPTION(_alglib_env_state.error_msg);\n#else\n _ALGLIB_SET_ERROR_FLAG(_alglib_env_state.error_msg);\n return 0;\n#endif\n }\n ae_state_set_break_jump(&_alglib_env_state, &_break_jump);\n if (_xparams.flags != 0x0)\n ae_state_set_flags(&_alglib_env_state, _xparams.flags);\n alglib_impl::ae_int_t result = alglib_impl::kdtreequerybox(const_cast<alglib_impl::kdtree*>(kdt.c_ptr()),\n const_cast<alglib_impl::ae_vector*>(boxmin.c_ptr()),\n const_cast<alglib_impl::ae_vector*>(boxmax.c_ptr()),\n &_alglib_env_state);\n alglib_impl::ae_state_clear(&_alglib_env_state);\n return *(reinterpret_cast<ae_int_t*>(&result));\n}", "docstring": "/*************************************************************************\nBox query: all points within user-specified box.\n\nIMPORTANT: this function can not be used in multithreaded code because it\n uses internal temporary buffer of kd-tree object, which can not\n be shared between multiple threads. If you want to perform\n parallel requests, use function which uses external request\n buffer: KDTreeTsQueryBox() (\"Ts\" stands for \"thread-safe\").\n\nINPUT PARAMETERS\n KDT - KD-tree\n BoxMin - lower bounds, array[0..NX-1].\n BoxMax - upper bounds, array[0..NX-1].\n\n\nRESULT\n number of actual neighbors found (in [0,N]).\n\nThis subroutine performs query and stores its result in the internal\nstructures of the KD-tree. You can use following subroutines to obtain\nthese results:\n* KDTreeQueryResultsX() to get X-values\n* KDTreeQueryResultsXY() to get X- and Y-values\n* KDTreeQueryResultsTags() to get tag values\n* KDTreeQueryResultsDistances() returns zeros for this request\n\nNOTE: this particular query returns unordered results, because there is no\n meaningful way of ordering points. 
Furthermore, no 'distance' is\n associated with points - it is either INSIDE or OUTSIDE (so request\n for distances will return zeros).\n\n -- ALGLIB --\n Copyright 14.05.2016 by Bochkanov Sergey\n*************************************************************************/", "url": "https://github.com/Xrvitd/GCNO/blob/bcae63774a2ecd72f862a76eab570ab6d1ffe432/src/Optimization/ALGLIB/alglibmisc.cpp#L1915-L1939", "sha": "bcae63774a2ecd72f862a76eab570ab6d1ffe432"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "mlpissoftmax", "code": "ae_bool mlpissoftmax(multilayerperceptron *network, ae_state *_state) {\n ae_bool result;\n\n result = network->structinfo.ptr.p_int[6] == 1;\n return result;\n}", "docstring": "/*************************************************************************\nTells whether network is SOFTMAX-normalized (i.e. classifier) or not.\n\n -- ALGLIB --\n Copyright 04.11.2007 by Bochkanov Sergey\n*************************************************************************/", "url": "https://github.com/Xrvitd/GCNO/blob/bcae63774a2ecd72f862a76eab570ab6d1ffe432/src/Optimization/ALGLIB/dataanalysis.cpp#L22283-L22288", "sha": "bcae63774a2ecd72f862a76eab570ab6d1ffe432"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "mlpeserialize", "code": "void mlpeserialize(ae_serializer *s,\n mlpensemble *ensemble,\n ae_state *_state) {\n\n ae_serializer_serialize_int(s, getmlpeserializationcode(_state), _state);\n ae_serializer_serialize_int(s, mlpe_mlpefirstversion, _state);\n ae_serializer_serialize_int(s, ensemble->ensemblesize, _state);\n serializerealarray(s, &ensemble->weights, -1, _state);\n serializerealarray(s, &ensemble->columnmeans, -1, _state);\n serializerealarray(s, &ensemble->columnsigmas, -1, _state);\n mlpserialize(s, &ensemble->network, _state);\n}", "docstring": "/*************************************************************************\nSerializer: serialization\n\n -- ALGLIB --\n Copyright 14.03.2011 by Bochkanov Sergey\n*************************************************************************/", "url": "https://github.com/Xrvitd/GCNO/blob/bcae63774a2ecd72f862a76eab570ab6d1ffe432/src/Optimization/ALGLIB/dataanalysis.cpp#L36987-L36998", "sha": "bcae63774a2ecd72f862a76eab570ab6d1ffe432"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "lsfitcreatef", "code": "void lsfitcreatef(const real_2d_array &x,\n const real_1d_array &y,\n const real_1d_array &c,\n const ae_int_t n,\n const ae_int_t m,\n const ae_int_t k,\n const double diffstep,\n lsfitstate &state,\n const xparams _xparams) {\n jmp_buf _break_jump;\n alglib_impl::ae_state _alglib_env_state;\n alglib_impl::ae_state_init(&_alglib_env_state);\n if (setjmp(_break_jump)) {\n#if !defined(AE_NO_EXCEPTIONS)\n _ALGLIB_CPP_EXCEPTION(_alglib_env_state.error_msg);\n#else\n _ALGLIB_SET_ERROR_FLAG(_alglib_env_state.error_msg);\n return;\n#endif\n }\n ae_state_set_break_jump(&_alglib_env_state, &_break_jump);\n if (_xparams.flags != 0x0)\n ae_state_set_flags(&_alglib_env_state, _xparams.flags);\n alglib_impl::lsfitcreatef(const_cast<alglib_impl::ae_matrix*>(x.c_ptr()),\n const_cast<alglib_impl::ae_vector*>(y.c_ptr()),\n const_cast<alglib_impl::ae_vector*>(c.c_ptr()),\n n,\n m,\n k,\n diffstep,\n const_cast<alglib_impl::lsfitstate*>(state.c_ptr()),\n &_alglib_env_state);\n alglib_impl::ae_state_clear(&_alglib_env_state);\n return;\n}", "docstring": "/*************************************************************************\nNonlinear least squares fitting using function values only.\n\nCombination of numerical differentiation and 
secant updates is used to\nobtain function Jacobian.\n\nNonlinear task min(F(c)) is solved, where\n\n F(c) = (f(c,x[0])-y[0])^2 + ... + (f(c,x[n-1])-y[n-1])^2,\n\n * N is a number of points,\n * M is a dimension of a space points belong to,\n * K is a dimension of a space of parameters being fitted,\n * w is an N-dimensional vector of weight coefficients,\n * x is a set of N points, each of them is an M-dimensional vector,\n * c is a K-dimensional vector of parameters being fitted\n\nThis subroutine uses only f(c,x[i]).\n\nINPUT PARAMETERS:\n X - array[0..N-1,0..M-1], points (one row = one point)\n Y - array[0..N-1], function values.\n C - array[0..K-1], initial approximation to the solution,\n N - number of points, N>1\n M - dimension of space\n K - number of parameters being fitted\n DiffStep- numerical differentiation step;\n should not be very small or large;\n large = loss of accuracy\n small = growth of round-off errors\n\nOUTPUT PARAMETERS:\n State - structure which stores algorithm state\n\n -- ALGLIB --\n Copyright 18.10.2008 by Bochkanov Sergey\n*************************************************************************/", "url": "https://github.com/Xrvitd/GCNO/blob/bcae63774a2ecd72f862a76eab570ab6d1ffe432/src/Optimization/ALGLIB/interpolation.cpp#L12177-L12211", "sha": "bcae63774a2ecd72f862a76eab570ab6d1ffe432"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "lsfit_internalchebyshevfit", "code": "static void lsfit_internalchebyshevfit(/* Real */ ae_vector *x,\n /* Real */ ae_vector *y,\n /* Real */ ae_vector *w,\n ae_int_t n,\n /* Real */ ae_vector *xc,\n /* Real */ ae_vector *yc,\n /* Integer */ ae_vector *dc,\n ae_int_t k,\n ae_int_t m,\n ae_int_t *info,\n /* Real */ ae_vector *c,\n lsfitreport *rep,\n ae_state *_state) {\n ae_frame _frame_block;\n ae_vector _xc;\n ae_vector _yc;\n ae_vector y2;\n ae_vector w2;\n ae_vector tmp;\n ae_vector tmp2;\n ae_vector tmpdiff;\n ae_vector bx;\n ae_vector by;\n ae_vector bw;\n ae_matrix fmatrix;\n ae_matrix cmatrix;\n ae_int_t i;\n ae_int_t j;\n double mx;\n double decay;\n\n ae_frame_make(_state, &_frame_block);\n memset(&_xc, 0, sizeof(_xc));\n memset(&_yc, 0, sizeof(_yc));\n memset(&y2, 0, sizeof(y2));\n memset(&w2, 0, sizeof(w2));\n memset(&tmp, 0, sizeof(tmp));\n memset(&tmp2, 0, sizeof(tmp2));\n memset(&tmpdiff, 0, sizeof(tmpdiff));\n memset(&bx, 0, sizeof(bx));\n memset(&by, 0, sizeof(by));\n memset(&bw, 0, sizeof(bw));\n memset(&fmatrix, 0, sizeof(fmatrix));\n memset(&cmatrix, 0, sizeof(cmatrix));\n ae_vector_init_copy(&_xc, xc, _state, ae_true);\n xc = &_xc;\n ae_vector_init_copy(&_yc, yc, _state, ae_true);\n yc = &_yc;\n *info = 0;\n ae_vector_clear(c);\n _lsfitreport_clear(rep);\n ae_vector_init(&y2, 0, DT_REAL, _state, ae_true);\n ae_vector_init(&w2, 0, DT_REAL, _state, ae_true);\n ae_vector_init(&tmp, 0, DT_REAL, _state, ae_true);\n ae_vector_init(&tmp2, 0, DT_REAL, _state, ae_true);\n ae_vector_init(&tmpdiff, 0, DT_REAL, _state, ae_true);\n ae_vector_init(&bx, 0, DT_REAL, _state, ae_true);\n ae_vector_init(&by, 0, DT_REAL, _state, ae_true);\n ae_vector_init(&bw, 0, DT_REAL, _state, ae_true);\n ae_matrix_init(&fmatrix, 0, 0, DT_REAL, _state, ae_true);\n ae_matrix_init(&cmatrix, 0, 0, DT_REAL, _state, ae_true);\n\n lsfit_clearreport(rep, _state);\n\n /*\n * weight decay for correct handling of task which becomes\n * degenerate after constraints are applied\n */\n decay = 10000 * ae_machineepsilon;\n\n /*\n * allocate space, initialize/fill:\n * * FMatrix- values of basis functions at X[]\n * * 
CMatrix- values (derivatives) of basis functions at XC[]\n * * fill constraints matrix\n * * fill first N rows of design matrix with values\n * * fill next M rows of design matrix with regularizing term\n * * append M zeros to Y\n * * append M elements, mean(abs(W)) each, to W\n */\n ae_vector_set_length(&y2, n + m, _state);\n ae_vector_set_length(&w2, n + m, _state);\n ae_vector_set_length(&tmp, m, _state);\n ae_vector_set_length(&tmpdiff, m, _state);\n ae_matrix_set_length(&fmatrix, n + m, m, _state);\n if (k > 0) {\n ae_matrix_set_length(&cmatrix, k, m + 1, _state);\n }\n\n /*\n * Fill design matrix, Y2, W2:\n * * first N rows with basis functions for original points\n * * next M rows with decay terms\n */\n for (i = 0; i <= n - 1; i++) {\n\n /*\n * prepare Ith row\n * use Tmp for calculations to avoid multidimensional arrays overhead\n */\n for (j = 0; j <= m - 1; j++) {\n if (j == 0) {\n tmp.ptr.p_double[j] = (double) (1);\n } else {\n if (j == 1) {\n tmp.ptr.p_double[j] = x->ptr.p_double[i];\n } else {\n tmp.ptr.p_double[j] = 2 * x->ptr.p_double[i] * tmp.ptr.p_double[j - 1] - tmp.ptr.p_double[j - 2];\n }\n }\n }\n ae_v_move(&fmatrix.ptr.pp_double[i][0], 1, &tmp.ptr.p_double[0], 1, ae_v_len(0, m - 1));\n }\n for (i = 0; i <= m - 1; i++) {\n for (j = 0; j <= m - 1; j++) {\n if (i == j) {\n fmatrix.ptr.pp_double[n + i][j] = decay;\n } else {\n fmatrix.ptr.pp_double[n + i][j] = (double) (0);\n }\n }\n }\n ae_v_move(&y2.ptr.p_double[0], 1, &y->ptr.p_double[0], 1, ae_v_len(0, n - 1));\n ae_v_move(&w2.ptr.p_double[0], 1, &w->ptr.p_double[0], 1, ae_v_len(0, n - 1));\n mx = (double) (0);\n for (i = 0; i <= n - 1; i++) {\n mx = mx + ae_fabs(w->ptr.p_double[i], _state);\n }\n mx = mx / n;\n for (i = 0; i <= m - 1; i++) {\n y2.ptr.p_double[n + i] = (double) (0);\n w2.ptr.p_double[n + i] = mx;\n }\n\n /*\n * fill constraints matrix\n */\n for (i = 0; i <= k - 1; i++) {\n\n /*\n * prepare Ith row\n * use Tmp for basis function values,\n * TmpDiff for basos function derivatives\n */\n for (j = 0; j <= m - 1; j++) {\n if (j == 0) {\n tmp.ptr.p_double[j] = (double) (1);\n tmpdiff.ptr.p_double[j] = (double) (0);\n } else {\n if (j == 1) {\n tmp.ptr.p_double[j] = xc->ptr.p_double[i];\n tmpdiff.ptr.p_double[j] = (double) (1);\n } else {\n tmp.ptr.p_double[j] = 2 * xc->ptr.p_double[i] * tmp.ptr.p_double[j - 1] - tmp.ptr.p_double[j - 2];\n tmpdiff.ptr.p_double[j] = 2 * (tmp.ptr.p_double[j - 1] + xc->ptr.p_double[i] * tmpdiff.ptr.p_double[j - 1])\n - tmpdiff.ptr.p_double[j - 2];\n }\n }\n }\n if (dc->ptr.p_int[i] == 0) {\n ae_v_move(&cmatrix.ptr.pp_double[i][0], 1, &tmp.ptr.p_double[0], 1, ae_v_len(0, m - 1));\n }\n if (dc->ptr.p_int[i] == 1) {\n ae_v_move(&cmatrix.ptr.pp_double[i][0], 1, &tmpdiff.ptr.p_double[0], 1, ae_v_len(0, m - 1));\n }\n cmatrix.ptr.pp_double[i][m] = yc->ptr.p_double[i];\n }\n\n /*\n * Solve constrained task\n */\n if (k > 0) {\n\n /*\n * solve using regularization\n */\n lsfitlinearwc(&y2, &w2, &fmatrix, &cmatrix, n + m, m, k, info, c, rep, _state);\n } else {\n\n /*\n * no constraints, no regularization needed\n */\n lsfitlinearwc(y, w, &fmatrix, &cmatrix, n, m, 0, info, c, rep, _state);\n }\n if (*info < 0) {\n ae_frame_leave(_state);\n return;\n }\n ae_frame_leave(_state);\n}", "docstring": "/*************************************************************************\nThis is internal function for Chebyshev fitting.\n\nIt assumes that input data are normalized:\n* X/XC belong to [-1,+1],\n* mean(Y)=0, stddev(Y)=1.\n\nIt does not checks inputs for errors.\n\nThis function is used to 
fit general (shifted) Chebyshev models, power\nbasis models or barycentric models.\n\nINPUT PARAMETERS:\n X - points, array[0..N-1].\n Y - function values, array[0..N-1].\n W - weights, array[0..N-1]\n N - number of points, N>0.\n XC - points where polynomial values/derivatives are constrained,\n array[0..K-1].\n YC - values of constraints, array[0..K-1]\n DC - array[0..K-1], types of constraints:\n * DC[i]=0 means that P(XC[i])=YC[i]\n * DC[i]=1 means that P'(XC[i])=YC[i]\n K - number of constraints, 0<=K<M.\n K=0 means no constraints (XC/YC/DC are not used in such cases)\n M - number of basis functions ( = polynomial_degree + 1), M>=1\n\nOUTPUT PARAMETERS:\n Info- same format as in LSFitLinearW() subroutine:\n * Info>0 task is solved\n * Info<=0 an error occured:\n -4 means inconvergence of internal SVD\n -3 means inconsistent constraints\n C - interpolant in Chebyshev form; [-1,+1] is used as base interval\n Rep - report, same format as in LSFitLinearW() subroutine.\n Following fields are set:\n * RMSError rms error on the (X,Y).\n * AvgError average error on the (X,Y).\n * AvgRelError average relative error on the non-zero Y\n * MaxError maximum error\n NON-WEIGHTED ERRORS ARE CALCULATED\n\nIMPORTANT:\n this subroitine doesn't calculate task's condition number for K<>0.\n\n -- ALGLIB PROJECT --\n Copyright 10.12.2009 by Bochkanov Sergey\n*************************************************************************/", "url": "https://github.com/Xrvitd/GCNO/blob/bcae63774a2ecd72f862a76eab570ab6d1ffe432/src/Optimization/ALGLIB/interpolation.cpp#L38128-L38317", "sha": "bcae63774a2ecd72f862a76eab570ab6d1ffe432"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "cmatrixqrunpackr", "code": "void cmatrixqrunpackr(/* Complex */ ae_matrix *a,\n ae_int_t m,\n ae_int_t n,\n /* Complex */ ae_matrix *r,\n ae_state *_state) {\n ae_int_t i;\n ae_int_t k;\n\n ae_matrix_clear(r);\n\n if (m <= 0 || n <= 0) {\n return;\n }\n k = ae_minint(m, n, _state);\n ae_matrix_set_length(r, m, n, _state);\n for (i = 0; i <= n - 1; i++) {\n r->ptr.pp_complex[0][i] = ae_complex_from_i(0);\n }\n for (i = 1; i <= m - 1; i++) {\n ae_v_cmove(&r->ptr.pp_complex[i][0], 1, &r->ptr.pp_complex[0][0], 1, \"N\", ae_v_len(0, n - 1));\n }\n for (i = 0; i <= k - 1; i++) {\n ae_v_cmove(&r->ptr.pp_complex[i][i], 1, &a->ptr.pp_complex[i][i], 1, \"N\", ae_v_len(i, n - 1));\n }\n}", "docstring": "/*************************************************************************\nUnpacking of matrix R from the QR decomposition of a matrix A\n\nInput parameters:\n A - matrices Q and R in compact form.\n Output of CMatrixQR subroutine.\n M - number of rows in given matrix A. M>=0.\n N - number of columns in given matrix A. 
N>=0.\n\nOutput parameters:\n R - matrix R, array[0..M-1, 0..N-1].\n\n -- ALGLIB routine --\n 17.02.2010\n Bochkanov Sergey\n*************************************************************************/", "url": "https://github.com/Xrvitd/GCNO/blob/bcae63774a2ecd72f862a76eab570ab6d1ffe432/src/Optimization/ALGLIB/linalg.cpp#L35642-L35666", "sha": "bcae63774a2ecd72f862a76eab570ab6d1ffe432"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "minnsrestartfrom", "code": "void minnsrestartfrom(const minnsstate &state, const real_1d_array &x, const xparams _xparams)\n{\n jmp_buf _break_jump;\n alglib_impl::ae_state _alglib_env_state;\n alglib_impl::ae_state_init(&_alglib_env_state);\n if( setjmp(_break_jump) )\n {\n#if !defined(AE_NO_EXCEPTIONS)\n _ALGLIB_CPP_EXCEPTION(_alglib_env_state.error_msg);\n#else\n _ALGLIB_SET_ERROR_FLAG(_alglib_env_state.error_msg);\n return;\n#endif\n }\n ae_state_set_break_jump(&_alglib_env_state, &_break_jump);\n if( _xparams.flags!=0x0 )\n ae_state_set_flags(&_alglib_env_state, _xparams.flags);\n alglib_impl::minnsrestartfrom(const_cast<alglib_impl::minnsstate*>(state.c_ptr()), const_cast<alglib_impl::ae_vector*>(x.c_ptr()), &_alglib_env_state);\n alglib_impl::ae_state_clear(&_alglib_env_state);\n return;\n}", "docstring": "/*************************************************************************\nThis subroutine restarts algorithm from new point.\nAll optimization parameters (including constraints) are left unchanged.\n\nThis function allows to solve multiple optimization problems (which\nmust have same number of dimensions) without object reallocation penalty.\n\nINPUT PARAMETERS:\n State - structure previously allocated with minnscreate() call.\n X - new starting point.\n\n -- ALGLIB --\n Copyright 18.05.2015 by Bochkanov Sergey\n*************************************************************************/", "url": "https://github.com/Xrvitd/GCNO/blob/bcae63774a2ecd72f862a76eab570ab6d1ffe432/src/Optimization/ALGLIB/optimization.cpp#L13529-L13549", "sha": "bcae63774a2ecd72f862a76eab570ab6d1ffe432"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "invfdistribution", "code": "double invfdistribution(const ae_int_t a, const ae_int_t b, const double y, const xparams _xparams) {\n jmp_buf _break_jump;\n alglib_impl::ae_state _alglib_env_state;\n alglib_impl::ae_state_init(&_alglib_env_state);\n if (setjmp(_break_jump)) {\n#if !defined(AE_NO_EXCEPTIONS)\n _ALGLIB_CPP_EXCEPTION(_alglib_env_state.error_msg);\n#else\n _ALGLIB_SET_ERROR_FLAG(_alglib_env_state.error_msg);\n return 0;\n#endif\n }\n ae_state_set_break_jump(&_alglib_env_state, &_break_jump);\n if (_xparams.flags != 0x0)\n ae_state_set_flags(&_alglib_env_state, _xparams.flags);\n double result = alglib_impl::invfdistribution(a, b, y, &_alglib_env_state);\n alglib_impl::ae_state_clear(&_alglib_env_state);\n return *(reinterpret_cast<double*>(&result));\n}", "docstring": "/*************************************************************************\nInverse of complemented F distribution\n\nFinds the F density argument x such that the integral\nfrom x to infinity of the F density is equal to the\ngiven probability p.\n\nThis is accomplished using the inverse beta integral\nfunction and the relations\n\n z = incbi( df2/2, df1/2, p )\n x = df2 (1-z) / (df1 z).\n\nNote: the following relations hold for the inverse of\nthe uncomplemented F distribution:\n\n z = incbi( df1/2, df2/2, p )\n x = df2 z / (df1 (1-z)).\n\nACCURACY:\n\nTested at random points (a,b,p).\n\n a,b Relative error:\narithmetic domain # trials peak 
rms\n For p between .001 and 1:\n IEEE 1,100 100000 8.3e-15 4.7e-16\n IEEE 1,10000 100000 2.1e-11 1.4e-13\n For p between 10^-6 and 10^-3:\n IEEE 1,100 50000 1.3e-12 8.4e-15\n IEEE 1,10000 50000 3.0e-12 4.8e-14\n\nCephes Math Library Release 2.8: June, 2000\nCopyright 1984, 1987, 1995, 2000 by Stephen L. Moshier\n*************************************************************************/", "url": "https://github.com/Xrvitd/GCNO/blob/bcae63774a2ecd72f862a76eab570ab6d1ffe432/src/Optimization/ALGLIB/specialfunctions.cpp#L2156-L2174", "sha": "bcae63774a2ecd72f862a76eab570ab6d1ffe432"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DelayModule::~DelayModule", "code": "DelayModule::~DelayModule() {\n // No Code Needed\n}", "docstring": "// Destructor", "url": "https://github.com/bkshepherd/DaisySeedProjects/blob/c95af83f75aa866f1a8e12be56920ca11d76cf93/Software/GuitarPedal/Effect-Modules/delay_module.cpp#L140-L142", "sha": "c95af83f75aa866f1a8e12be56920ca11d76cf93"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FWRetract::M208", "code": "void FWRetract::M208() {\n if (!parser.seen(\"FSRW\")) return M208_report();\n if (parser.seen('S')) settings.retract_recover_extra = parser.value_axis_units(E_AXIS);\n if (parser.seen('F')) settings.retract_recover_feedrate_mm_s = MMM_TO_MMS(parser.value_axis_units(E_AXIS));\n if (parser.seen('R')) settings.swap_retract_recover_feedrate_mm_s = MMM_TO_MMS(parser.value_axis_units(E_AXIS));\n if (parser.seen('W')) settings.swap_retract_recover_extra = parser.value_axis_units(E_AXIS);\n}", "docstring": "/**\n * M208: Set firmware un-retraction values\n *\n * S[+units] retract_recover_extra (in addition to M207 S*)\n * W[+units] swap_retract_recover_extra (multi-extruder)\n * F[units/min] retract_recover_feedrate_mm_s\n * R[units/min] swap_retract_recover_feedrate_mm_s\n */", "url": "https://github.com/hillsoftware/sv06/blob/00d5ee302758703c9a959309716b5f720d750afc/Marlin/src/feature/fwretract.cpp#L233-L239", "sha": "00d5ee302758703c9a959309716b5f720d750afc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fontgroup_drawstring", "code": "static void fontgroup_drawstring(font_group_t *group, const font_t *fnt_default, const char *utf8_msg, read_byte_cb_t cb_read_byte, void * userdata, fontgroup_cb_draw_t cb_draw_ram) {\n const uint8_t *p = (uint8_t*)utf8_msg;\n for (;;) {\n lchar_t wc;\n p = get_utf8_value_cb(p, cb_read_byte, wc);\n if (!wc) break;\n fontgroup_drawwchar(group, fnt_default, wc, userdata, cb_draw_ram);\n }\n}", "docstring": "/**\n * @brief try to process a utf8 string\n *\n * @param pu8g : U8G pointer\n * @param fnt_default : the default font\n * @param utf8_msg : the UTF-8 string\n * @param cb_read_byte : how to read the utf8_msg, from RAM or ROM (call read_byte_ram or pgm_read_byte)\n * @param userdata : User's data\n * @param cb_draw_ram : the callback function of userdata to draw a !RAM! 
string (actually it is to draw a one byte string in RAM)\n *\n * @return N/A\n *\n * Get the screen pixel width of a ROM UTF-8 string\n */", "url": "https://github.com/hillsoftware/sv06/blob/00d5ee302758703c9a959309716b5f720d750afc/Marlin/src/lcd/dogm/u8g_fontutf8.cpp#L106-L114", "sha": "00d5ee302758703c9a959309716b5f720d750afc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MarlinUI::init_lcd", "code": "void MarlinUI::init_lcd() { DWIN_Startup(); }", "docstring": "// Initialize or re-initialize the LCD", "url": "https://github.com/hillsoftware/sv06/blob/00d5ee302758703c9a959309716b5f720d750afc/Marlin/src/lcd/e3v2/marlinui/ui_common.cpp#L82-L82", "sha": "00d5ee302758703c9a959309716b5f720d750afc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DWIN_Print_Header", "code": "void DWIN_Print_Header(const char *text = nullptr) {\n static char headertxt[31] = \"\"; // Print header text\n if (text) {\n const int8_t size = _MIN(30U, strlen_P(text));\n LOOP_L_N(i, size) headertxt[i] = text[i];\n headertxt[size] = '\\0';\n }\n if (checkkey == PrintProcess || checkkey == PrintDone) {\n DWIN_Draw_Rectangle(1, HMI_data.Background_Color, 0, 60, DWIN_WIDTH, 60+16);\n DWINUI::Draw_CenteredString(60, headertxt);\n }\n}", "docstring": "// Update filename on print", "url": "https://github.com/hillsoftware/sv06/blob/00d5ee302758703c9a959309716b5f720d750afc/Marlin/src/lcd/e3v2/proui/dwin.cpp#L601-L612", "sha": "00d5ee302758703c9a959309716b5f720d750afc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ImGui::DockContextNewFrameUpdateUndocking", "code": "void ImGui::DockContextNewFrameUpdateUndocking(ImGuiContext* ctx)\n{\n ImGuiContext& g = *ctx;\n ImGuiDockContext* dc = &ctx->DockContext;\n if (!(g.IO.ConfigFlags & ImGuiConfigFlags_DockingEnable))\n {\n if (dc->Nodes.Data.Size > 0 || dc->Requests.Size > 0)\n DockContextClearNodes(ctx, 0, true);\n return;\n }\n\n // Setting NoSplit at runtime merges all nodes\n if (g.IO.ConfigDockingNoSplit)\n for (int n = 0; n < dc->Nodes.Data.Size; n++)\n if (ImGuiDockNode* node = (ImGuiDockNode*)dc->Nodes.Data[n].val_p)\n if (node->IsRootNode() && node->IsSplitNode())\n {\n DockBuilderRemoveNodeChildNodes(node->ID);\n //dc->WantFullRebuild = true;\n }\n\n // Process full rebuild\n#if 0\n if (ImGui::IsKeyPressed(ImGui::GetKeyIndex(ImGuiKey_C)))\n dc->WantFullRebuild = true;\n#endif\n if (dc->WantFullRebuild)\n {\n DockContextRebuildNodes(ctx);\n dc->WantFullRebuild = false;\n }\n\n // Process Undocking requests (we need to process them _before_ the UpdateMouseMovingWindowNewFrame call in NewFrame)\n for (ImGuiDockRequest& req : dc->Requests)\n {\n if (req.Type == ImGuiDockRequestType_Undock && req.UndockTargetWindow)\n DockContextProcessUndockWindow(ctx, req.UndockTargetWindow);\n else if (req.Type == ImGuiDockRequestType_Undock && req.UndockTargetNode)\n DockContextProcessUndockNode(ctx, req.UndockTargetNode);\n }\n}", "docstring": "// Docking context update function, called by NewFrame()", "url": "https://github.com/djhackersdev/bemanitools/blob/e0ff83f664f9282bca2fd5ddf1b4dc8cfa392c0a/src/main/imgui/imgui.cpp#L16843-L16883", "sha": "e0ff83f664f9282bca2fd5ddf1b4dc8cfa392c0a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UDialogueEntry::PreEditChange", "code": "void UDialogueEntry::PreEditChange(FProperty* PropertyAboutToChange)\n{\n\tSuper::PreEditChange(PropertyAboutToChange);\n\tif (PropertyAboutToChange->GetFName() != 
GET_MEMBER_NAME_CHECKED(UDialogueEntry, Transition) ||\n\t\tEdges.Num() == 0)\n\t{\n\t\treturn;\n\t}\n\n\tconst FString Message = \"Are you sure you want to update the transition type? \"\n\t\t\t\t\t\"This will break all connections from the current node\";\n\tif (FMessageDialog::Open(EAppMsgType::YesNo, FText::FromString(Message)) != EAppReturnType::Yes)\n\t{\n\t\tBlockTransitionUpdate = true;\n\t\tPreviousTransition = Transition;\n\t}\t\n}", "docstring": "/**\n * @brief This is called when a property is about to be modified externally\n * @param PropertyAboutToChange The property that is about to be modified\n */", "url": "https://github.com/hfjooste/UltimateStarterKit/blob/f8ba35499f4f9a0053abad502ff808d7a8c898ec/Plugins/USK/Source/USK/Dialogue/DialogueEntry.cpp#L40-L56", "sha": "f8ba35499f4f9a0053abad502ff808d7a8c898ec"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UFpsCounter::UpdateVisibility", "code": "void UFpsCounter::UpdateVisibility(bool IsVisible)\n{\n\tconst bool WasHidden = GetVisibility() == ESlateVisibility::Collapsed ||\n\t\tGetVisibility() == ESlateVisibility::Hidden; \n\tUSK_LOG_INFO(*FString::Format(TEXT(\"Updating visibility to {0}\"), { IsVisible }));\n\tSetVisibility(IsVisible ? ESlateVisibility::Visible : ESlateVisibility::Collapsed);\n\n\tif (WasHidden && IsVisible)\n\t{\n\t\tUpdateFramerateAfterDelay();\n\t}\n}", "docstring": "/**\n * @brief Update the visibility of the widget\n * @param IsVisible Is the widget visible?\n */", "url": "https://github.com/hfjooste/UltimateStarterKit/blob/f8ba35499f4f9a0053abad502ff808d7a8c898ec/Plugins/USK/Source/USK/Widgets/FpsCounter.cpp#L17-L28", "sha": "f8ba35499f4f9a0053abad502ff808d7a8c898ec"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "platform_get_buttons", "code": "uint8_t platform_get_buttons()\n{\n return 0;\n}", "docstring": "// Called periodically to get the state of any buttons installed on the platform.\n// If none are installed the below function is fine.", "url": "https://github.com/BlueSCSI/BlueSCSI-v2/blob/24cc0fb957b984ebc756e5651e871bfdd2ad8168/lib/BlueSCSI_platform_template/BlueSCSI_platform.cpp#L64-L67", "sha": "24cc0fb957b984ebc756e5651e871bfdd2ad8168"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "onSendFilePrep", "code": "void onSendFilePrep(char * dir_name)\n{\n char file_name[32+1];\n\n scsiEnterPhase(DATA_OUT);\n scsiRead(static_cast<uint8_t*>(static_cast<void*>(file_name)), 32+1, NULL);\n file_name[32] = '\\0';\n\n debuglog(\"TOOLBOX OPEN FILE FOR WRITE: '\", file_name, \"'\");\n SD.chdir(dir_name);\n gFile.open(file_name, FILE_WRITE);\n SD.chdir(\"/\");\n if(gFile.isOpen() && gFile.isWritable())\n {\n gFile.rewind();\n gFile.sync();\n // do i need to manually set phase to status here?\n return;\n } else {\n gFile.close();\n scsiDev.status = CHECK_CONDITION;\n scsiDev.target->sense.code = ILLEGAL_REQUEST;\n //SCSI_ASC_INVALID_FIELD_IN_CDB\n scsiDev.phase = STATUS;\n }\n}", "docstring": "/*\n Prepares a file for receiving. 
The file name is null terminated in the scsi data.\n*/", "url": "https://github.com/BlueSCSI/BlueSCSI-v2/blob/24cc0fb957b984ebc756e5651e871bfdd2ad8168/src/BlueSCSI_Toolbox.cpp#L251-L276", "sha": "24cc0fb957b984ebc756e5651e871bfdd2ad8168"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MTFS::CreateDir", "code": "bool MTFS::CreateDir(char * szDir) {\n\tPRM1B\tprm;\n\n\t/* Error check that the file name length is acceptable.\n\t*/\n\tif (!mtds.FCheckName(szDir, clsCmdFs, cmdFsMkdir)) {\n\t\treturn false;\n\t}\n\n\t/* Send the command packet.\n\t*/\n\tprm.valB1 = strlen(szDir)+1;\n\n\tmtds.MtdsProcessCmdWr(clsCmdFs, cmdFsMkdir, sizeof(prm), (uint8_t *)&prm,\n\t\t\t\t\t\tprm.valB1, (uint8_t *)szDir);\n\n\t/* Check for error and return failure.\n\t*/\n\tif (prhdrMtdsRet->sta != staCmdSuccess) {\n\t\treturn false;\n\t}\n\n\t/* Return success.\n\t*/\n\treturn true;\n\n}", "docstring": "/* ------------------------------------------------------------ */\n/***\tMTFS::CreateDir(szDir)\n**\n**\tParameters:\n**\t\tszDir\t\t- path string of directory to create\n**\n**\tReturn Values:\n**\t\tnone\n**\n**\tErrors:\n**\t\tReturns true if successful, false if not.\n**\n**\tDescription:\n**\t\tCreate the specified directory.\n*/", "url": "https://github.com/suisuisi/FPGA_Library/blob/1e33525198872d63ced48e8f0cebaa2419b9eb22/ThreePart/digilent_ip/ip/Pmods/PmodMTDS_v1_0/drivers/PmodMTDS_v1_0/src/MtdsFs.cpp#L285-L311", "sha": "1e33525198872d63ced48e8f0cebaa2419b9eb22"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MTDS::GetTchMoveDelta", "code": "bool MTDS::GetTchMoveDelta(int16_t * pdxco, int16_t * pdyco) {\n\tRET4B *\tpret = (RET4B *)&rgbMtdsRetVal[sizeof(RHDR)];\n\n\t/* Send the command packet.\n\t*/\n\tMtdsProcessCmdWr(clsCmdUtil, cmdUtilGetTchMoveDelta, 0, 0, 0, 0);\n\n\t/* Check for error and return failure if so.\n\t*/\n\tif (prhdrMtdsRet->sta != staCmdSuccess) {\n\t\treturn false;\n\t}\n\n\t/* Return the touch panel threshold settings.\n\t*/\n\tif (pdxco != 0) {\n\t\t*pdxco = pret->valB1;\n\t}\n\tif (pdyco != 0) {\n\t\t*pdyco = pret->valB2;\n\t}\n\treturn true;\n\n}", "docstring": "/* ------------------------------------------------------------ */\n/***\tMTDS::GetTchMoveDelta(pdxco, pdyco)\n**\n**\tParameters:\n**\t\tpdxco\t- pointer to variable to receive touch dxco threshold\n**\t\tpdyco\t- pointer to variable to receive touch dyco threshold\n**\n**\tReturn Values:\n**\t\tReturns the current touch panel finger move threshold delta values\n**\n**\tErrors:\n**\t\tReturns true if successful, false if error.\n**\n**\tDescription:\n**\t\tReturns the current finger move delta thresholds for the touch panel.\n*/", "url": "https://github.com/suisuisi/FPGA_Library/blob/1e33525198872d63ced48e8f0cebaa2419b9eb22/ThreePart/digilent_ip/ip/Pmods/PmodMTDS_v1_0/drivers/PmodMTDS_v1_0/src/MtdsUtil.cpp#L661-L684", "sha": "1e33525198872d63ced48e8f0cebaa2419b9eb22"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MTDS::end", "code": "void MTDS::end() {\n\n\tfInitialized = false;\n\n}", "docstring": "/* ------------------------------------------------------------ */\n/***\tMTDS::end()\n**\n**\tParameters:\n**\t\tnone\n**\n**\tReturn Values:\n**\t\tnone\n**\n**\tErrors:\n**\t\tnone\n**\n**\tDescription:\n**\t\tTell the display shield that we aren't interested in talking to it anymore.\n**\t\tAfter calling this, it is necessary to call MTDS::begin() in order to start\n**\t\tusing it again.\n*/", "url": 
"https://github.com/suisuisi/FPGA_Library/blob/1e33525198872d63ced48e8f0cebaa2419b9eb22/ThreePart/digilent_ip/ip/Pmods/PmodMTDS_v1_0/drivers/PmodMTDS_v1_0/src/mtds.cpp#L229-L233", "sha": "1e33525198872d63ced48e8f0cebaa2419b9eb22"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "readTwoBytes", "code": "word readTwoBytes(int in_adr_hi, int in_adr_lo)\n{\n word retVal = -1;\n \n /* 读低位 */\n Wire.beginTransmission(_ams5600_Address);\n Wire.write(in_adr_lo);\n Wire.endTransmission();\n Wire.requestFrom(_ams5600_Address, 1);\n while(Wire.available() == 0);\n int low = Wire.read();\n \n /* 读高位 */ \n Wire.beginTransmission(_ams5600_Address);\n Wire.write(in_adr_hi);\n Wire.endTransmission();\n Wire.requestFrom(_ams5600_Address, 1);\n while(Wire.available() == 0);\n int high = Wire.read();\n \n retVal = (high << 8) | low;\n \n return retVal;\n}", "docstring": "//readTwoBytes(int in_adr_hi, int in_adr_lo)这段代码是一个函数,其目的是从I2C设备(在代码中的变量名为_ams5600_Address)中读取两个字节数据,并将其合并成一个16位的无符号整数返回。\n//具体来说,函数接受两个整型参数in_adr_hi和in_adr_lo,它们用于指定需要读取的两个字节数据的地址。函数中首先通过Wire库开始I2C传输,向设备写入in_adr_lo和in_adr_hi分别作为数据地址,然后读取相应的字节数据。\n//在每个Wire.requestFrom()调用之后,通过一个while循环等待数据接收完毕。然后读取接收到的低字节和高字节,并使用位运算将它们合并成一个16位的无符号整数。\n//最后,返回合并后的整数。如果读取过程中出现错误或者函数没有成功读取到数据,则函数返回-1。", "url": "https://github.com/ToanTech/DengFOC_Lib/blob/8494ec97ea240691472a36fe3282c00fefcf32e2/DengFOC 库/V0.1 电压力矩 位置闭环/AS5600.cpp#L27-L50", "sha": "8494ec97ea240691472a36fe3282c00fefcf32e2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "VIA6522::VIA6522", "code": "VIA6522::VIA6522(int tag)\n : m_tag(tag)\n{\n #if DEBUG6522\n m_tick = 0;\n #endif\n}", "docstring": "/////////////////////////////////////////////////////////////////////////////////////////////\n// VIA (6522 - Versatile Interface Adapter)", "url": "https://github.com/EremusOne/ESPectrum/blob/a501be723a08f3817787cd90fe77a45b476e38a4/components/fabgl/src/emudevs/VIA6522.cpp#L45-L51", "sha": "a501be723a08f3817787cd90fe77a45b476e38a4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OSD::pref_rom_menu", "code": "void OSD::pref_rom_menu() {\n\n menu_curopt = 1;\n menu_saverect = true;\n\n while (1) {\n\n menu_level = 2;\n uint8_t opt2 = menuRun(MENU_ROM_PREF[Config::lang]);\n\n if (opt2) {\n\n menu_level = 3;\n menu_curopt = 1;\n menu_saverect = true;\n\n if (opt2 == 1) {\n\n const string menu_res[] = {\"48K\",\"48Kes\",\"48Kcs\",\"Last\"};\n\n while (1) {\n\n string rpref_menu = MENU_ROM_PREF_48[Config::lang];\n\n menu_curopt = prepare_checkbox_menu(rpref_menu,Config::pref_romSet_48);\n\n int opt3 = menuRun(rpref_menu);\n menu_saverect = false;\n\n if (opt3 == 0) break;\n\n if (opt3 != menu_curopt) {\n Config::pref_romSet_48 = menu_res[opt3 - 1];\n Config::save(\"pref_romSet_48\");\n }\n\n }\n\n } else if (opt2 == 2) {\n\n const string menu_res[] = {\"128K\",\"128Kes\",\"+2\",\"+2es\",\"ZX81+\",\"128Kcs\",\"Last\"};\n\n while (1) {\n\n string rpref_menu = MENU_ROM_PREF_128[Config::lang];\n\n menu_curopt = prepare_checkbox_menu(rpref_menu,Config::pref_romSet_128);\n\n int opt3 = menuRun(rpref_menu);\n menu_saverect = false;\n\n if (opt3 == 0) break;\n\n if (opt3 != menu_curopt) {\n Config::pref_romSet_128 = menu_res[opt3 - 1];\n Config::save(\"pref_romSet_128\");\n }\n\n }\n\n } else if (opt2 == 3) {\n\n const string menu_res[] = {\"v1es\",\"v1pt\",\"v2es\",\"v2pt\",\"v3es\",\"v3pt\",\"v3en\",\"TKcs\",\"Last\"};\n\n while (1) {\n\n string rpref_menu = MENU_ROM_PREF_TK90X[Config::lang];\n\n 
menu_curopt = prepare_checkbox_menu(rpref_menu,Config::pref_romSet_TK90X);\n\n int opt3 = menuRun(rpref_menu);\n menu_saverect = false;\n\n if (opt3 == 0) break;\n\n if (opt3 != menu_curopt) {\n Config::pref_romSet_TK90X = menu_res[opt3 - 1];\n Config::save(\"pref_romSet_TK90X\");\n }\n\n }\n\n } else if (opt2 == 4) {\n\n const string menu_res[] = {\"95es\",\"95pt\",\"Last\"};\n\n while (1) {\n\n string rpref_menu = MENU_ROM_PREF_TK95[Config::lang];\n\n menu_curopt = prepare_checkbox_menu(rpref_menu,Config::pref_romSet_TK95);\n\n int opt3 = menuRun(rpref_menu);\n menu_saverect = false;\n\n if (opt3 == 0) break;\n\n if (opt3 != menu_curopt) {\n Config::pref_romSet_TK95 = menu_res[opt3 - 1];\n Config::save(\"pref_romSet_TK95\");\n }\n\n }\n\n }\n\n menu_curopt = opt2;\n menu_saverect = false;\n\n } else\n break;\n\n }\n\n menu_curopt = 3;\n\n}", "docstring": "// *******************************************************************************************************\n// PREFERRED ROM MENU\n// *******************************************************************************************************", "url": "https://github.com/EremusOne/ESPectrum/blob/a501be723a08f3817787cd90fe77a45b476e38a4/src/OSDMain.cpp#L674-L790", "sha": "a501be723a08f3817787cd90fe77a45b476e38a4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FtpClient::Noop", "code": "bool FtpClient::Noop()\n{\n\tif (!FtpSendCmd(\"NOOP\", \"25\", mp_ftphandle))\n\t\treturn 0;\n\treturn 1;\n}", "docstring": "/*\n * send a NOOP cmd to keep connection alive\n *\n * return 1 if successful, 0 otherwise\n */", "url": "https://github.com/cy33hc/ps4-ezremote-client/blob/b7fe46cb94310f8591275d3c34107b5944137a4a/source/clients/ftpclient.cpp#L1190-L1195", "sha": "b7fe46cb94310f8591275d3c34107b5944137a4a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ImBezierCubicClosestPoint", "code": "ImVec2 ImBezierCubicClosestPoint(const ImVec2& p1, const ImVec2& p2, const ImVec2& p3, const ImVec2& p4, const ImVec2& p, int num_segments)\n{\n IM_ASSERT(num_segments > 0); // Use ImBezierCubicClosestPointCasteljau()\n ImVec2 p_last = p1;\n ImVec2 p_closest;\n float p_closest_dist2 = FLT_MAX;\n float t_step = 1.0f / (float)num_segments;\n for (int i_step = 1; i_step <= num_segments; i_step++)\n {\n ImVec2 p_current = ImBezierCubicCalc(p1, p2, p3, p4, t_step * i_step);\n ImVec2 p_line = ImLineClosestPoint(p_last, p_current, p);\n float dist2 = ImLengthSqr(p - p_line);\n if (dist2 < p_closest_dist2)\n {\n p_closest = p_line;\n p_closest_dist2 = dist2;\n }\n p_last = p_current;\n }\n return p_closest;\n}", "docstring": "//-----------------------------------------------------------------------------\n// [SECTION] MISC HELPERS/UTILITIES (Geometry functions)\n//-----------------------------------------------------------------------------", "url": "https://github.com/bayaraa/d2gl/blob/919df3528de5c5c1b41b326cf68821cd1981c42c/d2gl/vendor/include/imgui/imgui.cpp#L1510-L1530", "sha": "919df3528de5c5c1b41b326cf68821cd1981c42c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FindHoveredWindow", "code": "static void FindHoveredWindow()\n{\n ImGuiContext& g = *GImGui;\n\n ImGuiWindow* hovered_window = NULL;\n ImGuiWindow* hovered_window_ignoring_moving_window = NULL;\n if (g.MovingWindow && !(g.MovingWindow->Flags & ImGuiWindowFlags_NoMouseInputs))\n hovered_window = g.MovingWindow;\n\n ImVec2 padding_regular = g.Style.TouchExtraPadding;\n ImVec2 padding_for_resize = 
g.IO.ConfigWindowsResizeFromEdges ? g.WindowsHoverPadding : padding_regular;\n for (int i = g.Windows.Size - 1; i >= 0; i--)\n {\n ImGuiWindow* window = g.Windows[i];\n IM_MSVC_WARNING_SUPPRESS(28182); // [Static Analyzer] Dereferencing NULL pointer.\n if (!window->Active || window->Hidden)\n continue;\n if (window->Flags & ImGuiWindowFlags_NoMouseInputs)\n continue;\n\n // Using the clipped AABB, a child window will typically be clipped by its parent (not always)\n ImRect bb(window->OuterRectClipped);\n if (window->Flags & (ImGuiWindowFlags_ChildWindow | ImGuiWindowFlags_NoResize | ImGuiWindowFlags_AlwaysAutoResize))\n bb.Expand(padding_regular);\n else\n bb.Expand(padding_for_resize);\n if (!bb.Contains(g.IO.MousePos))\n continue;\n\n // Support for one rectangular hole in any given window\n // FIXME: Consider generalizing hit-testing override (with more generic data, callback, etc.) (#1512)\n if (window->HitTestHoleSize.x != 0)\n {\n ImVec2 hole_pos(window->Pos.x + (float)window->HitTestHoleOffset.x, window->Pos.y + (float)window->HitTestHoleOffset.y);\n ImVec2 hole_size((float)window->HitTestHoleSize.x, (float)window->HitTestHoleSize.y);\n if (ImRect(hole_pos, hole_pos + hole_size).Contains(g.IO.MousePos))\n continue;\n }\n\n if (hovered_window == NULL)\n hovered_window = window;\n IM_MSVC_WARNING_SUPPRESS(28182); // [Static Analyzer] Dereferencing NULL pointer.\n if (hovered_window_ignoring_moving_window == NULL && (!g.MovingWindow || window->RootWindow != g.MovingWindow->RootWindow))\n hovered_window_ignoring_moving_window = window;\n if (hovered_window && hovered_window_ignoring_moving_window)\n break;\n }\n\n g.HoveredWindow = hovered_window;\n g.HoveredWindowUnderMovingWindow = hovered_window_ignoring_moving_window;\n}", "docstring": "// Find window given position, search front-to-back\n// FIXME: Note that we have an inconsequential lag here: OuterRectClipped is updated in Begin(), so windows moved programmatically\n// with SetWindowPos() and not SetNextWindowPos() will have that rectangle lagging by a frame at the time FindHoveredWindow() is\n// called, aka before the next Begin(). Moving window isn't affected.", "url": "https://github.com/bayaraa/d2gl/blob/919df3528de5c5c1b41b326cf68821cd1981c42c/d2gl/vendor/include/imgui/imgui.cpp#L4988-L5038", "sha": "919df3528de5c5c1b41b326cf68821cd1981c42c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BdbRawEngine::OpenDb", "code": "int32_t BdbRawEngine::OpenDb(Db** dbpp, const char* file_name, DbEnv* envp, uint32_t extra_flags) {\n int ret;\n uint32_t open_flags;\n\n DbTxn* txn = nullptr;\n try {\n int ret = envp->txn_begin(nullptr, &txn, 0);\n if (ret != 0) {\n DINGO_LOG(ERROR) << fmt::format(\"[bdb] txn begin failed ret: {}.\", ret);\n return -1;\n }\n\n bdb_transaction_alive_count << 1;\n\n Db* db = new Db(envp, 0);\n\n // Point to the new'd Db\n *dbpp = db;\n db->set_pagesize(FLAGS_bdb_page_size);\n\n if (extra_flags != 0) {\n ret = db->set_flags(extra_flags);\n }\n\n // Now open the database */\n open_flags = DB_CREATE | // Allow database creation\n // DB_READ_UNCOMMITTED | // Allow uncommitted reads\n // DB_AUTO_COMMIT | // Allow autocommit\n DB_MULTIVERSION | // Multiversion concurrency control\n DB_THREAD; // Cause the database to be free-threade1\n\n db->open(txn, // Txn pointer\n file_name, // File name\n nullptr, // Logical db name\n DB_BTREE, // Database type (using btree)\n open_flags, // Open flags\n 0); // File mode. 
Using defaults\n\n // commit\n try {\n ret = bdb::BdbHelper::TxnCommit(&txn);\n if (ret == 0) {\n return 0;\n } else {\n DINGO_LOG(ERROR) << fmt::format(\"[bdb] txn commit failed, ret: {}.\", ret);\n }\n } catch (DbException& db_exception) {\n bdb::BdbHelper::PrintEnvStat(envp);\n DINGO_LOG(ERROR) << fmt::format(\"[bdb] error on txn commit: {} {}.\", db_exception.get_errno(),\n db_exception.what());\n ret = -1;\n }\n\n if (ret != 0) {\n DINGO_LOG(ERROR) << fmt::format(\"[bdb] error on txn commit, ret: {}.\", ret);\n return -1;\n }\n\n } catch (DbException& db_exception) {\n bdb::BdbHelper::PrintEnvStat(envp);\n DINGO_LOG(ERROR) << fmt::format(\"OpenDb: db open failed: {} {}.\", db_exception.get_errno(), db_exception.what());\n bdb::BdbHelper::TxnAbort(&txn);\n return -1;\n }\n\n return 0;\n}", "docstring": "// namespace bdb\n// Open a BDB database", "url": "https://github.com/dingodb/dingo-store/blob/6becbfa5ab7ed9327b5b260d2390b0344918c396/src/engine/bdb_raw_engine.cc#L1886-L1952", "sha": "6becbfa5ab7ed9327b5b260d2390b0344918c396"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ServiceHelper::ValidateRegionState", "code": "butil::Status ServiceHelper::ValidateRegionState(store::RegionPtr region) {\n // Check is exist region.\n if (region == nullptr) {\n return butil::Status(pb::error::EREGION_NOT_FOUND, \"Not found region\");\n }\n if (region->State() == pb::common::StoreRegionState::NEW) {\n return butil::Status(pb::error::EREGION_UNAVAILABLE, \"Region(%lu) is new, waiting later\", region->Id());\n }\n if (region->State() == pb::common::StoreRegionState::STANDBY) {\n return butil::Status(pb::error::EREGION_UNAVAILABLE, \"Region(%lu) is standby, waiting later\", region->Id());\n }\n if (region->State() == pb::common::StoreRegionState::DELETING) {\n return butil::Status(pb::error::EREGION_UNAVAILABLE, \"Region(%lu) is deleting\", region->Id());\n }\n if (region->State() == pb::common::StoreRegionState::DELETED) {\n return butil::Status(pb::error::EREGION_UNAVAILABLE, \"Region(%lu) is deleted\", region->Id());\n }\n if (region->State() == pb::common::StoreRegionState::ORPHAN) {\n return butil::Status(pb::error::EREGION_UNAVAILABLE, \"Region(%lu) is orphan\", region->Id());\n }\n if (region->State() == pb::common::StoreRegionState::TOMBSTONE) {\n return butil::Status(pb::error::EREGION_UNAVAILABLE, \"Region(%lu) is tombstone\", region->Id());\n }\n\n return butil::Status();\n}", "docstring": "// Validate region state", "url": "https://github.com/dingodb/dingo-store/blob/6becbfa5ab7ed9327b5b260d2390b0344918c396/src/server/service_helper.cc#L88-L113", "sha": "6becbfa5ab7ed9327b5b260d2390b0344918c396"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GetBoolAssertionFailureMessage", "code": "std::string GetBoolAssertionFailureMessage(\n const AssertionResult& assertion_result,\n const char* expression_text,\n const char* actual_predicate_value,\n const char* expected_predicate_value) {\n const char* actual_message = assertion_result.message();\n Message msg;\n msg << \"Value of: \" << expression_text\n << \"\\n Actual: \" << actual_predicate_value;\n if (actual_message[0] != '\\0')\n msg << \" (\" << actual_message << \")\";\n msg << \"\\nExpected: \" << expected_predicate_value;\n return msg.GetString();\n}", "docstring": "// Constructs a failure message for Boolean assertions such as EXPECT_TRUE.", "url": 
"https://github.com/zhangganlin/GlobalSfMpy/blob/ac6a0564d84e1d6e9a4077195e384d379aa20492/thirdparty/TheiaSfM/libraries/gtest/src/gtest.cc#L1346-L1359", "sha": "ac6a0564d84e1d6e9a4077195e384d379aa20492"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ParseStringFlag", "code": "bool ParseStringFlag(const char* str, const char* flag, std::string* value) {\n // Gets the value of the flag as a string.\n const char* const value_str = ParseFlagValue(str, flag, false);\n\n // Aborts if the parsing failed.\n if (value_str == NULL) return false;\n\n // Sets *value to the value of the flag.\n *value = value_str;\n return true;\n}", "docstring": "// Parses a string for a string flag, in the form of\n// \"--flag=value\".\n//\n// On success, stores the value of the flag in *value, and returns\n// true. On failure, returns false without changing *value.", "url": "https://github.com/zhangganlin/GlobalSfMpy/blob/ac6a0564d84e1d6e9a4077195e384d379aa20492/thirdparty/TheiaSfM/libraries/gtest/src/gtest.cc#L5087-L5097", "sha": "ac6a0564d84e1d6e9a4077195e384d379aa20492"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TEST", "code": "TEST(MessageTest, StreamsString) {\n const ::std::string str(\"Hello\");\n EXPECT_EQ(\"Hello\", (Message() << str).GetString());\n}", "docstring": "// Tests streaming std::string.", "url": "https://github.com/zhangganlin/GlobalSfMpy/blob/ac6a0564d84e1d6e9a4077195e384d379aa20492/thirdparty/TheiaSfM/libraries/gtest/test/gtest-message_test.cc#L105-L108", "sha": "ac6a0564d84e1d6e9a4077195e384d379aa20492"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TEST_F", "code": "TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnUserTypeSuccess) {\n ASSERT_PRED_FORMAT2(PredFormatFunctor2(),\n Bool(++n1_),\n Bool(++n2_));\n finished_ = true;\n}", "docstring": "// Tests a successful ASSERT_PRED_FORMAT2 where the\n// predicate-formatter is a functor on a user-defined type (Bool).", "url": "https://github.com/zhangganlin/GlobalSfMpy/blob/ac6a0564d84e1d6e9a4077195e384d379aa20492/thirdparty/TheiaSfM/libraries/gtest/test/gtest_pred_impl_unittest.cc#L835-L840", "sha": "ac6a0564d84e1d6e9a4077195e384d379aa20492"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PredFunction3Int", "code": "bool PredFunction3Int(int v1, int v2, int v3) {\n return v1 + v2 + v3 > 0;\n}", "docstring": "// The following two functions are needed to circumvent a bug in\n// gcc 2.95.3, which sometimes has problem with the above template\n// function.", "url": "https://github.com/zhangganlin/GlobalSfMpy/blob/ac6a0564d84e1d6e9a4077195e384d379aa20492/thirdparty/TheiaSfM/libraries/gtest/test/gtest_pred_impl_unittest.cc#L900-L902", "sha": "ac6a0564d84e1d6e9a4077195e384d379aa20492"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TEST_F", "code": "TEST_F(DoubleTest, Infinity) {\n EXPECT_DOUBLE_EQ(values_.infinity, values_.close_to_infinity);\n EXPECT_DOUBLE_EQ(-values_.infinity, -values_.close_to_infinity);\n#if !GTEST_OS_SYMBIAN\n // Nokia's STLport crashes if we try to output infinity or NaN.\n EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, -values_.infinity),\n \"-values_.infinity\");\n\n // This is interesting as the representations of infinity_ and nan1_\n // are only 1 DLP apart.\n EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, values_.nan1),\n \"values_.nan1\");\n#endif // !GTEST_OS_SYMBIAN\n}", "docstring": "// Tests comparing with 
infinity.\n//\n// This ensures that no overflow occurs when comparing numbers whose\n// absolute value is very large.", "url": "https://github.com/zhangganlin/GlobalSfMpy/blob/ac6a0564d84e1d6e9a4077195e384d379aa20492/thirdparty/TheiaSfM/libraries/gtest/test/gtest_unittest.cc#L2976-L2989", "sha": "ac6a0564d84e1d6e9a4077195e384d379aa20492"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TEST", "code": "TEST(SPRTTest, CalculateSPRTDecisionThreshold) {\n double sigma = 0.05;\n double epsilon = 0.1;\n double decision_threshold = CalculateSPRTDecisionThreshold(sigma, epsilon);\n VLOG(0) << \"Decision threshold: \" << decision_threshold;\n\n // Test with change of values for timing.\n decision_threshold = CalculateSPRTDecisionThreshold(sigma, epsilon, 200, 3);\n VLOG(0) << \"Decision threshold: \" << decision_threshold;\n}", "docstring": "// TODO(cmsweeney): Make this test a verification (i.e. is the value coming out\n// accurate?) instead of just a sanity check.", "url": "https://github.com/zhangganlin/GlobalSfMpy/blob/ac6a0564d84e1d6e9a4077195e384d379aa20492/thirdparty/TheiaSfM/src/theia/math/probability/sprt_test.cc#L92-L101", "sha": "ac6a0564d84e1d6e9a4077195e384d379aa20492"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "decode_dual_swizzle", "code": "Swizzle4 decode_dual_swizzle(Imm4 swizz, const bool extended, const bool vec4) {\n static Swizzle4 swizz_v4_std[] = {\n SWIZZLE_CHANNEL_4(X, X, X, X),\n SWIZZLE_CHANNEL_4(Y, Y, Y, Y),\n SWIZZLE_CHANNEL_4(Z, Z, Z, Z),\n SWIZZLE_CHANNEL_4(W, W, W, W),\n SWIZZLE_CHANNEL_4(X, Y, Z, W),\n SWIZZLE_CHANNEL_4(Y, Z, W, W),\n SWIZZLE_CHANNEL_4(X, Y, Z, Z),\n SWIZZLE_CHANNEL_4(X, X, Y, Z),\n SWIZZLE_CHANNEL_4(X, Y, X, Y),\n SWIZZLE_CHANNEL_4(X, Y, W, Z),\n SWIZZLE_CHANNEL_4(Z, X, Y, W),\n SWIZZLE_CHANNEL_4(Z, W, Z, W),\n SWIZZLE_CHANNEL_4(0, 0, 0, 0),\n SWIZZLE_CHANNEL_4(H, H, H, H),\n SWIZZLE_CHANNEL_4(1, 1, 1, 1),\n SWIZZLE_CHANNEL_4(2, 2, 2, 2),\n };\n\n static Swizzle4 swizz_v4_ext[] = {\n SWIZZLE_CHANNEL_4(Y, Z, X, W),\n SWIZZLE_CHANNEL_4(Z, W, X, Y),\n SWIZZLE_CHANNEL_4(X, Z, W, Y),\n SWIZZLE_CHANNEL_4(Y, Y, W, W),\n SWIZZLE_CHANNEL_4(W, Y, Z, W),\n SWIZZLE_CHANNEL_4(W, Z, W, Z),\n SWIZZLE_CHANNEL_4(X, Y, Z, X),\n SWIZZLE_CHANNEL_4(Z, Z, W, W),\n SWIZZLE_CHANNEL_4(X, W, Z, X),\n SWIZZLE_CHANNEL_4(Y, Y, Y, X),\n SWIZZLE_CHANNEL_4(Y, Y, Y, Z),\n SWIZZLE_CHANNEL_4(Z, W, Z, W),\n SWIZZLE_CHANNEL_4(Y, Z, X, Z),\n SWIZZLE_CHANNEL_4(X, X, Y, Y),\n SWIZZLE_CHANNEL_4(X, Z, W, W),\n SWIZZLE_CHANNEL_4(X, Y, Z, 1),\n };\n\n static Swizzle3 swizz_v3_std[] = {\n SWIZZLE_CHANNEL_3(X, X, X),\n SWIZZLE_CHANNEL_3(Y, Y, Y),\n SWIZZLE_CHANNEL_3(Z, Z, Z),\n SWIZZLE_CHANNEL_3(W, W, W),\n SWIZZLE_CHANNEL_3(X, Y, Z),\n SWIZZLE_CHANNEL_3(Y, Z, W),\n SWIZZLE_CHANNEL_3(X, X, Y),\n SWIZZLE_CHANNEL_3(X, Y, X),\n SWIZZLE_CHANNEL_3(Y, Y, X),\n SWIZZLE_CHANNEL_3(Y, Y, Z),\n SWIZZLE_CHANNEL_3(Z, X, Y),\n SWIZZLE_CHANNEL_3(X, Z, Y),\n SWIZZLE_CHANNEL_3(0, 0, 0),\n SWIZZLE_CHANNEL_3(H, H, H),\n SWIZZLE_CHANNEL_3(1, 1, 1),\n SWIZZLE_CHANNEL_3(2, 2, 2),\n };\n\n static Swizzle3 swizz_v3_ext[] = {\n SWIZZLE_CHANNEL_3(X, Y, Y),\n SWIZZLE_CHANNEL_3(Y, X, Y),\n SWIZZLE_CHANNEL_3(X, X, Z),\n SWIZZLE_CHANNEL_3(Y, X, X),\n SWIZZLE_CHANNEL_3(X, Y, 0),\n SWIZZLE_CHANNEL_3(X, 1, 0),\n SWIZZLE_CHANNEL_3(X, Z, Y),\n SWIZZLE_CHANNEL_3(Y, Z, X),\n SWIZZLE_CHANNEL_3(Z, Y, X),\n SWIZZLE_CHANNEL_3(Z, Z, Y),\n SWIZZLE_CHANNEL_3(X, Y, 1),\n SWIZZLE_CHANNEL_3_UNDEFINED,\n SWIZZLE_CHANNEL_3_UNDEFINED,\n SWIZZLE_CHANNEL_3_UNDEFINED,\n 
SWIZZLE_CHANNEL_3_UNDEFINED,\n SWIZZLE_CHANNEL_3_UNDEFINED,\n };\n\n if (vec4) {\n if (extended)\n return swizz_v4_ext[swizz];\n else\n return swizz_v4_std[swizz];\n } else {\n if (extended)\n return to_swizzle4(swizz_v3_ext[swizz]);\n else\n return to_swizzle4(swizz_v3_std[swizz]);\n }\n\n return SWIZZLE_CHANNEL_4_UNDEFINED;\n}", "docstring": "// Dual has its own unique swizzle.", "url": "https://github.com/Vita3K/Vita3K-Android/blob/a62e232024d21ca4a74d16a5d5cacedf66bd39ea/vita3k/shader/src/usse_decode_helpers.cpp#L263-L353", "sha": "a62e232024d21ca4a74d16a5d5cacedf66bd39ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "create_owned_buffer_from_vector_of_floats", "code": "tt::tt_metal::OwnedBuffer create_owned_buffer_from_vector_of_floats(\n const std::vector<float>& data, DataType data_type) {\n switch (data_type) {\n case DataType::BFLOAT8_B: {\n auto uint32_vector = pack_fp32_vec_as_bfp8_tiles(data, /*row_major_input=*/false, /*is_exp_a=*/false);\n return tt::tt_metal::owned_buffer::create(std::move(uint32_vector));\n }\n case DataType::BFLOAT4_B: {\n auto uint32_vector = pack_fp32_vec_as_bfp4_tiles(data, /*row_major_input=*/false, /*is_exp_a=*/false);\n return tt::tt_metal::owned_buffer::create(std::move(uint32_vector));\n }\n case DataType::FLOAT32: {\n auto data_copy = data;\n return tt::tt_metal::owned_buffer::create(std::move(data_copy));\n }\n case DataType::BFLOAT16: {\n std::vector<bfloat16> bfloat16_data(data.size());\n std::transform(std::begin(data), std::end(data), std::begin(bfloat16_data), [](float value) {\n return bfloat16(value);\n });\n return tt::tt_metal::owned_buffer::create(std::move(bfloat16_data));\n }\n default: {\n throw std::runtime_error(\"Cannot create a host buffer!\");\n }\n }\n}", "docstring": "// copypaste from deprecated tensor pybinds ttnn", "url": "https://github.com/tenstorrent/tt-metal/blob/941b34cff33ce2953cf984ec8898af25dbfbfbb3/tt-train/sources/ttml/core/tt_tensor_utils.cpp#L57-L83", "sha": "941b34cff33ce2953cf984ec8898af25dbfbfbb3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FDKernel::GetTunnelStop", "code": "uint32_t FDKernel::GetTunnelStop(chip_id_t device_id) {\n chip_id_t mmio_device_id = tt::Cluster::instance().get_associated_mmio_device(device_id);\n for (auto tunnel : tt::Cluster::instance().get_tunnels_from_mmio_device(mmio_device_id)) {\n for (uint32_t idx = 0; idx < tunnel.size(); idx++) {\n if (tunnel[idx] == device_id) {\n return idx;\n }\n }\n }\n TT_ASSERT(false, \"Could not find tunnel stop of Device {}\", device_id);\n return 0;\n}", "docstring": "// Helper function to get the tunnel stop of current device", "url": "https://github.com/tenstorrent/tt-metal/blob/941b34cff33ce2953cf984ec8898af25dbfbfbb3/tt_metal/impl/dispatch/kernel_config/fd_kernel.cpp#L51-L62", "sha": "941b34cff33ce2953cf984ec8898af25dbfbfbb3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TextEditCallbackStub", "code": "static int TextEditCallbackStub(ImGuiInputTextCallbackData* data)\n {\n ExampleAppConsole* console = (ExampleAppConsole*)data->UserData;\n return console->TextEditCallback(data);\n }", "docstring": "// In C++11 you'd be better off using lambdas for this sort of forwarding callbacks", "url": "https://github.com/Half-People/HImGuiEditor/blob/a0d29c1ad484cafdd1410ed4bc6568bf02c14beb/HalfPeopleStudioImGuiEditor/DependentFile/API/ImGui/imgui_demo.cpp#L6944-L6948", "sha": "a0d29c1ad484cafdd1410ed4bc6568bf02c14beb"} +{"repo_name": "", "dataset": 
"github_2023", "owner": "", "lang": "", "func_name": "ImGui_ImplGlfw_UpdateKeyModifiers", "code": "static void ImGui_ImplGlfw_UpdateKeyModifiers(GLFWwindow* window)\n{\n ImGuiIO& io = ImGui::GetIO();\n io.AddKeyEvent(ImGuiMod_Ctrl, (glfwGetKey(window, GLFW_KEY_LEFT_CONTROL) == GLFW_PRESS) || (glfwGetKey(window, GLFW_KEY_RIGHT_CONTROL) == GLFW_PRESS));\n io.AddKeyEvent(ImGuiMod_Shift, (glfwGetKey(window, GLFW_KEY_LEFT_SHIFT) == GLFW_PRESS) || (glfwGetKey(window, GLFW_KEY_RIGHT_SHIFT) == GLFW_PRESS));\n io.AddKeyEvent(ImGuiMod_Alt, (glfwGetKey(window, GLFW_KEY_LEFT_ALT) == GLFW_PRESS) || (glfwGetKey(window, GLFW_KEY_RIGHT_ALT) == GLFW_PRESS));\n io.AddKeyEvent(ImGuiMod_Super, (glfwGetKey(window, GLFW_KEY_LEFT_SUPER) == GLFW_PRESS) || (glfwGetKey(window, GLFW_KEY_RIGHT_SUPER) == GLFW_PRESS));\n}", "docstring": "// X11 does not include current pressed/released modifier key in 'mods' flags submitted by GLFW\n// See https://github.com/ocornut/imgui/issues/6034 and https://github.com/glfw/glfw/issues/1630", "url": "https://github.com/Half-People/HImGuiEditor/blob/a0d29c1ad484cafdd1410ed4bc6568bf02c14beb/HalfPeopleStudioImGuiEditor/DependentFile/API/ImGui/backends/imgui_impl_glfw.cpp#L309-L316", "sha": "a0d29c1ad484cafdd1410ed4bc6568bf02c14beb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LogTextV", "code": "static inline void LogTextV(ImGuiContext& g, const char* fmt, va_list args)\n{\n if (g.LogFile)\n {\n g.LogBuffer.Buf.resize(0);\n g.LogBuffer.appendfv(fmt, args);\n ImFileWrite(g.LogBuffer.c_str(), sizeof(char), (ImU64)g.LogBuffer.size(), g.LogFile);\n }\n else\n {\n g.LogBuffer.appendfv(fmt, args);\n }\n}", "docstring": "//-----------------------------------------------------------------------------\n// [SECTION] LOGGING/CAPTURING\n//-----------------------------------------------------------------------------\n// All text output from the interface can be captured into tty/file/clipboard.\n// By default, tree nodes are automatically opened during logging.\n//-----------------------------------------------------------------------------\n// Pass text data straight to log (without being displayed)", "url": "https://github.com/Half-People/HImGuiEditor/blob/a0d29c1ad484cafdd1410ed4bc6568bf02c14beb/HalfPeopleStudioImGuiEditor/ImGui/imgui.cpp#L13263-L13275", "sha": "a0d29c1ad484cafdd1410ed4bc6568bf02c14beb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ImGui::CallContextHooks", "code": "void ImGui::CallContextHooks(ImGuiContext* ctx, ImGuiContextHookType hook_type)\n{\n ImGuiContext& g = *ctx;\n for (int n = 0; n < g.Hooks.Size; n++)\n if (g.Hooks[n].Type == hook_type)\n g.Hooks[n].Callback(&g, &g.Hooks[n]);\n}", "docstring": "// Call context hooks (used by e.g. 
test engine)\n// We assume a small number of hooks so all stored in same array", "url": "https://github.com/Half-People/HImGuiEditor/blob/a0d29c1ad484cafdd1410ed4bc6568bf02c14beb/HalfPeopleStudioImGuiEditor/Plugin/Plugin-Example/HImGuiEditorPlugin/HImGuiEditorPlugin/ImGui/imgui.cpp#L3751-L3757", "sha": "a0d29c1ad484cafdd1410ed4bc6568bf02c14beb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LogTextV", "code": "static inline void LogTextV(ImGuiContext& g, const char* fmt, va_list args)\n{\n if (g.LogFile)\n {\n g.LogBuffer.Buf.resize(0);\n g.LogBuffer.appendfv(fmt, args);\n ImFileWrite(g.LogBuffer.c_str(), sizeof(char), (ImU64)g.LogBuffer.size(), g.LogFile);\n }\n else\n {\n g.LogBuffer.appendfv(fmt, args);\n }\n}", "docstring": "//-----------------------------------------------------------------------------\n// [SECTION] LOGGING/CAPTURING\n//-----------------------------------------------------------------------------\n// All text output from the interface can be captured into tty/file/clipboard.\n// By default, tree nodes are automatically opened during logging.\n//-----------------------------------------------------------------------------\n// Pass text data straight to log (without being displayed)", "url": "https://github.com/Half-People/HImGuiEditor/blob/a0d29c1ad484cafdd1410ed4bc6568bf02c14beb/HalfPeopleStudioImGuiEditor/Plugin/Plugin-Example/HImGuiEditorPlugin/HImGuiEditorPlugin/ImGui/imgui.cpp#L13263-L13275", "sha": "a0d29c1ad484cafdd1410ed4bc6568bf02c14beb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ImGui_ImplDX12_CreateWindow", "code": "static void ImGui_ImplDX12_CreateWindow(ImGuiViewport* viewport)\n{\n ImGui_ImplDX12_Data* bd = ImGui_ImplDX12_GetBackendData();\n ImGui_ImplDX12_ViewportData* vd = IM_NEW(ImGui_ImplDX12_ViewportData)(bd->numFramesInFlight);\n viewport->RendererUserData = vd;\n\n // PlatformHandleRaw should always be a HWND, whereas PlatformHandle might be a higher-level handle (e.g. GLFWWindow*, SDL_Window*).\n // Some backends will leave PlatformHandleRaw NULL, in which case we assume PlatformHandle will contain the HWND.\n HWND hwnd = viewport->PlatformHandleRaw ? 
(HWND)viewport->PlatformHandleRaw : (HWND)viewport->PlatformHandle;\n IM_ASSERT(hwnd != 0);\n\n vd->FrameIndex = UINT_MAX;\n\n // Create command queue.\n D3D12_COMMAND_QUEUE_DESC queue_desc = {};\n queue_desc.Flags = D3D12_COMMAND_QUEUE_FLAG_NONE;\n queue_desc.Type = D3D12_COMMAND_LIST_TYPE_DIRECT;\n\n HRESULT res = S_OK;\n res = bd->pd3dDevice->CreateCommandQueue(&queue_desc, IID_PPV_ARGS(&vd->CommandQueue));\n IM_ASSERT(res == S_OK);\n\n // Create command allocator.\n for (UINT i = 0; i < bd->numFramesInFlight; ++i)\n {\n res = bd->pd3dDevice->CreateCommandAllocator(D3D12_COMMAND_LIST_TYPE_DIRECT, IID_PPV_ARGS(&vd->FrameCtx[i].CommandAllocator));\n IM_ASSERT(res == S_OK);\n }\n\n // Create command list.\n res = bd->pd3dDevice->CreateCommandList(0, D3D12_COMMAND_LIST_TYPE_DIRECT, vd->FrameCtx[0].CommandAllocator, nullptr, IID_PPV_ARGS(&vd->CommandList));\n IM_ASSERT(res == S_OK);\n vd->CommandList->Close();\n\n // Create fence.\n res = bd->pd3dDevice->CreateFence(0, D3D12_FENCE_FLAG_NONE, IID_PPV_ARGS(&vd->Fence));\n IM_ASSERT(res == S_OK);\n\n vd->FenceEvent = CreateEvent(nullptr, FALSE, FALSE, nullptr);\n IM_ASSERT(vd->FenceEvent != nullptr);\n\n // Create swap chain\n // FIXME-VIEWPORT: May want to copy/inherit swap chain settings from the user/application.\n DXGI_SWAP_CHAIN_DESC1 sd1;\n ZeroMemory(&sd1, sizeof(sd1));\n sd1.BufferCount = bd->numFramesInFlight;\n sd1.Width = (UINT)viewport->Size.x;\n sd1.Height = (UINT)viewport->Size.y;\n sd1.Format = bd->RTVFormat;\n sd1.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;\n sd1.SampleDesc.Count = 1;\n sd1.SampleDesc.Quality = 0;\n sd1.SwapEffect = DXGI_SWAP_EFFECT_FLIP_DISCARD;\n sd1.AlphaMode = DXGI_ALPHA_MODE_UNSPECIFIED;\n sd1.Scaling = DXGI_SCALING_STRETCH;\n sd1.Stereo = FALSE;\n\n IDXGIFactory4* dxgi_factory = nullptr;\n res = ::CreateDXGIFactory1(IID_PPV_ARGS(&dxgi_factory));\n IM_ASSERT(res == S_OK);\n\n IDXGISwapChain1* swap_chain = nullptr;\n res = dxgi_factory->CreateSwapChainForHwnd(vd->CommandQueue, hwnd, &sd1, nullptr, nullptr, &swap_chain);\n IM_ASSERT(res == S_OK);\n\n dxgi_factory->Release();\n\n // Or swapChain.As(&mSwapChain)\n IM_ASSERT(vd->SwapChain == nullptr);\n swap_chain->QueryInterface(IID_PPV_ARGS(&vd->SwapChain));\n swap_chain->Release();\n\n // Create the render targets\n if (vd->SwapChain)\n {\n D3D12_DESCRIPTOR_HEAP_DESC desc = {};\n desc.Type = D3D12_DESCRIPTOR_HEAP_TYPE_RTV;\n desc.NumDescriptors = bd->numFramesInFlight;\n desc.Flags = D3D12_DESCRIPTOR_HEAP_FLAG_NONE;\n desc.NodeMask = 1;\n\n HRESULT hr = bd->pd3dDevice->CreateDescriptorHeap(&desc, IID_PPV_ARGS(&vd->RtvDescHeap));\n IM_ASSERT(hr == S_OK);\n\n SIZE_T rtv_descriptor_size = bd->pd3dDevice->GetDescriptorHandleIncrementSize(D3D12_DESCRIPTOR_HEAP_TYPE_RTV);\n D3D12_CPU_DESCRIPTOR_HANDLE rtv_handle = vd->RtvDescHeap->GetCPUDescriptorHandleForHeapStart();\n for (UINT i = 0; i < bd->numFramesInFlight; i++)\n {\n vd->FrameCtx[i].RenderTargetCpuDescriptors = rtv_handle;\n rtv_handle.ptr += rtv_descriptor_size;\n }\n\n ID3D12Resource* back_buffer;\n for (UINT i = 0; i < bd->numFramesInFlight; i++)\n {\n IM_ASSERT(vd->FrameCtx[i].RenderTarget == nullptr);\n vd->SwapChain->GetBuffer(i, IID_PPV_ARGS(&back_buffer));\n bd->pd3dDevice->CreateRenderTargetView(back_buffer, nullptr, vd->FrameCtx[i].RenderTargetCpuDescriptors);\n vd->FrameCtx[i].RenderTarget = back_buffer;\n }\n }\n\n for (UINT i = 0; i < bd->numFramesInFlight; i++)\n ImGui_ImplDX12_DestroyRenderBuffers(&vd->FrameRenderBuffers[i]);\n}", "docstring": 
"//--------------------------------------------------------------------------------------------------------\n// MULTI-VIEWPORT / PLATFORM INTERFACE SUPPORT\n// This is an _advanced_ and _optional_ feature, allowing the backend to create and handle multiple viewports simultaneously.\n// If you are new to dear imgui or creating a new binding for dear imgui, it is recommended that you completely ignore this section first..\n//--------------------------------------------------------------------------------------------------------", "url": "https://github.com/Half-People/HImGuiEditor/blob/a0d29c1ad484cafdd1410ed4bc6568bf02c14beb/x64/Release/DependentFile/API/ImGui/backends/imgui_impl_dx12.cpp#L843-L947", "sha": "a0d29c1ad484cafdd1410ed4bc6568bf02c14beb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TFT_eSPI::setCursor", "code": "void TFT_eSPI::setCursor(int16_t x, int16_t y, uint8_t font)\n{\n textfont = font;\n cursor_x = x;\n cursor_y = y;\n}", "docstring": "/***************************************************************************************\n** Function name: setCursor\n** Description: Set the text cursor x,y position and font\n***************************************************************************************/", "url": "https://github.com/Xinyuan-LilyGO/T-Display-S3-AMOLED/blob/edd133335c9f7c38d1e9be2d0eb67371f1f6428e/lib/TFT_eSPI/TFT_eSPI.cpp#L2866-L2871", "sha": "edd133335c9f7c38d1e9be2d0eb67371f1f6428e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "createGradientPostprocessingPass", "code": "std::unique_ptr createGradientPostprocessingPass()\n{\n return std::make_unique();\n}", "docstring": "// namespace gradient", "url": "https://github.com/PennyLaneAI/catalyst/blob/729d468ad1bec692242c6b20560a2b9922debb31/mlir/lib/Gradient/Transforms/gradient_postprocess.cpp#L54-L57", "sha": "729d468ad1bec692242c6b20560a2b9922debb31"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QCPAxis::setRange", "code": "void QCPAxis::setRange(double lower, double upper)\n{\n if (lower == mRange.lower && upper == mRange.upper)\n return;\n \n if (!QCPRange::validRange(lower, upper)) return;\n QCPRange oldRange = mRange;\n mRange.lower = lower;\n mRange.upper = upper;\n if (mScaleType == stLogarithmic)\n {\n mRange = mRange.sanitizedForLogScale();\n } else\n {\n mRange = mRange.sanitizedForLinScale();\n }\n emit rangeChanged(mRange);\n emit rangeChanged(mRange, oldRange);\n}", "docstring": "/*!\n \\overload\n \n Sets the lower and upper bound of the axis range.\n \n To invert the direction of an axis, use \\ref setRangeReversed.\n \n There is also a slot to set a range, see \\ref setRange(const QCPRange &range).\n*/", "url": "https://github.com/zhuzichu520/FluentUI/blob/09e04302930883c06d87a0c0e304d527f4e8c6cc/src/qmlcustomplot/qcustomplot.cpp#L8459-L8477", "sha": "09e04302930883c06d87a0c0e304d527f4e8c6cc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QCPTextElement::QCPTextElement", "code": "QCPTextElement::QCPTextElement(QCustomPlot *parentPlot, const QString &text, double pointSize) :\n QCPLayoutElement(parentPlot),\n mText(text),\n mTextFlags(Qt::AlignCenter),\n mFont(QFont(QLatin1String(\"sans serif\"), int(pointSize))), // will be taken from parentPlot if available, see below\n mTextColor(Qt::black),\n mSelectedFont(QFont(QLatin1String(\"sans serif\"), int(pointSize))), // will be taken from parentPlot if available, see below\n 
mSelectedTextColor(Qt::blue),\n mSelectable(false),\n mSelected(false)\n{\n mFont.setPointSizeF(pointSize); // set here again as floating point, because constructor above only takes integer\n if (parentPlot)\n {\n mFont = parentPlot->font();\n mFont.setPointSizeF(pointSize);\n mSelectedFont = parentPlot->font();\n mSelectedFont.setPointSizeF(pointSize);\n }\n setMargins(QMargins(2, 2, 2, 2));\n}", "docstring": "/*! \\overload\n \n Creates a new QCPTextElement instance and sets default values.\n\n The initial text is set to \\a text with \\a pointSize.\n*/", "url": "https://github.com/zhuzichu520/FluentUI/blob/09e04302930883c06d87a0c0e304d527f4e8c6cc/src/qmlcustomplot/qcustomplot.cpp#L19729-L19749", "sha": "09e04302930883c06d87a0c0e304d527f4e8c6cc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QCPFinancial::setChartStyle", "code": "void QCPFinancial::setChartStyle(QCPFinancial::ChartStyle style)\n{\n mChartStyle = style;\n}", "docstring": "/*!\n Sets which representation style shall be used to display the OHLC data.\n*/", "url": "https://github.com/zhuzichu520/FluentUI/blob/09e04302930883c06d87a0c0e304d527f4e8c6cc/src/qmlcustomplot/qcustomplot.cpp#L27156-L27159", "sha": "09e04302930883c06d87a0c0e304d527f4e8c6cc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QCPItemCurve::mainPen", "code": "QPen QCPItemCurve::mainPen() const\n{\n return mSelected ? mSelectedPen : mPen;\n}", "docstring": "/*! \\internal\n\n Returns the pen that should be used for drawing lines. Returns mPen when the\n item is not selected and mSelectedPen when it is.\n*/", "url": "https://github.com/zhuzichu520/FluentUI/blob/09e04302930883c06d87a0c0e304d527f4e8c6cc/src/qmlcustomplot/qcustomplot.cpp#L29426-L29429", "sha": "09e04302930883c06d87a0c0e304d527f4e8c6cc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QCPPolarAxisRadial::subTickLengthOut", "code": "int QCPPolarAxisRadial::subTickLengthOut() const\n{\n return mSubTickLengthOut;\n}", "docstring": "/* No documentation as it is a property getter */", "url": "https://github.com/zhuzichu520/FluentUI/blob/09e04302930883c06d87a0c0e304d527f4e8c6cc/src/qmlcustomplot/qcustomplot.cpp#L31207-L31210", "sha": "09e04302930883c06d87a0c0e304d527f4e8c6cc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "rx_timing_window_params_us_to_symbols", "code": "static ofh::rx_window_timing_parameters\nrx_timing_window_params_us_to_symbols(std::chrono::microseconds Ta4_max,\n std::chrono::microseconds Ta4_min,\n std::chrono::duration symbol_duration)\n{\n ofh::rx_window_timing_parameters rx_window_timing_params;\n rx_window_timing_params.sym_start = std::floor(Ta4_min / symbol_duration);\n rx_window_timing_params.sym_end = std::ceil(Ta4_max / symbol_duration);\n\n return rx_window_timing_params;\n}", "docstring": "/// Converts reception window timing parameters from microseconds to number of symbols given the symbol duration.", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/apps/units/flexible_o_du/split_7_2/helpers/ru_ofh_config_translator.cpp#L46-L56", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ran_param_definition_choice_c::set", "code": "void ran_param_definition_choice_c::set(types::options e)\n{\n type_ = e;\n switch (type_) {\n case types::choice_list:\n c = ran_param_definition_choice_list_s{};\n 
break;\n case types::choice_structure:\n c = ran_param_definition_choice_structure_s{};\n break;\n case types::nulltype:\n break;\n default:\n log_invalid_choice_id(type_, \"ran_param_definition_choice_c\");\n }\n}", "docstring": "// RANParameter-Definition-Choice ::= CHOICE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/e2sm/e2sm_rc_ies.cpp#L320-L335", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ran_param_value_type_choice_elem_false_s::pack", "code": "SRSASN_CODE ran_param_value_type_choice_elem_false_s::pack(bit_ref& bref) const\n{\n bref.pack(ext, 1);\n HANDLE_CODE(bref.pack(ran_param_value_present, 1));\n\n if (ran_param_value_present) {\n HANDLE_CODE(ran_param_value.pack(bref));\n }\n\n return SRSASN_SUCCESS;\n}", "docstring": "// RANParameter-ValueType-Choice-ElementFalse ::= SEQUENCE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/e2sm/e2sm_rc_ies.cpp#L8579-L8589", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "active_ul_bwp_s::pack", "code": "SRSASN_CODE active_ul_bwp_s::pack(bit_ref& bref) const\n{\n HANDLE_CODE(bref.pack(shift7dot5k_hz_present, 1));\n HANDLE_CODE(bref.pack(ie_exts_present, 1));\n\n HANDLE_CODE(pack_integer(bref, location_and_bw, (uint16_t)0u, (uint16_t)37949u, true, true));\n HANDLE_CODE(subcarrier_spacing.pack(bref));\n HANDLE_CODE(cp.pack(bref));\n HANDLE_CODE(pack_integer(bref, tx_direct_current_location, (uint16_t)0u, (uint16_t)3301u, true, true));\n if (shift7dot5k_hz_present) {\n HANDLE_CODE(shift7dot5k_hz.pack(bref));\n }\n HANDLE_CODE(srs_cfg.pack(bref));\n if (ie_exts_present) {\n HANDLE_CODE(ie_exts.pack(bref));\n }\n\n return SRSASN_SUCCESS;\n}", "docstring": "// ActiveULBWP ::= SEQUENCE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/f1ap/f1ap_ies.cpp#L5856-L5874", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "broadcast_m_rbs_failed_to_be_modified_item_s::pack", "code": "SRSASN_CODE broadcast_m_rbs_failed_to_be_modified_item_s::pack(bit_ref& bref) const\n{\n bref.pack(ext, 1);\n HANDLE_CODE(bref.pack(cause_present, 1));\n HANDLE_CODE(bref.pack(ie_exts_present, 1));\n\n HANDLE_CODE(pack_integer(bref, mrb_id, (uint16_t)1u, (uint16_t)512u, true, true));\n if (cause_present) {\n HANDLE_CODE(cause.pack(bref));\n }\n if (ie_exts_present) {\n HANDLE_CODE(ie_exts.pack(bref));\n }\n\n return SRSASN_SUCCESS;\n}", "docstring": "// BroadcastMRBs-FailedToBeModified-Item ::= SEQUENCE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/f1ap/f1ap_ies.cpp#L14385-L14400", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "prs_cfg_s::pack", "code": "SRSASN_CODE prs_cfg_s::pack(bit_ref& bref) const\n{\n HANDLE_CODE(bref.pack(ie_exts_present, 1));\n\n HANDLE_CODE(pack_dyn_seq_of(bref, prs_res_set_list, 1, 8, true));\n if (ie_exts_present) {\n HANDLE_CODE(ie_exts.pack(bref));\n }\n\n return SRSASN_SUCCESS;\n}", "docstring": "// PRSConfiguration ::= SEQUENCE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/f1ap/f1ap_ies.cpp#L35129-L35139", "sha": 
"a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "multicast_distribution_setup_request_ies_o::idx_to_id", "code": "uint32_t multicast_distribution_setup_request_ies_o::idx_to_id(uint32_t idx)\n{\n static const uint32_t names[] = {451, 452, 502, 503};\n return map_enum_number(names, 4, idx, \"id\");\n}", "docstring": "// MulticastDistributionSetupRequestIEs ::= OBJECT SET OF F1AP-PROTOCOL-IES", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/f1ap/f1ap_pdu_contents.cpp#L19058-L19062", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sl_drbs_failed_to_be_modified_item_ies_o::idx_to_id", "code": "uint32_t sl_drbs_failed_to_be_modified_item_ies_o::idx_to_id(uint32_t idx)\n{\n static const uint32_t names[] = {313};\n return map_enum_number(names, 1, idx, \"id\");\n}", "docstring": "// SLDRBs-FailedToBeModified-ItemIEs ::= OBJECT SET OF F1AP-PROTOCOL-IES", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/f1ap/f1ap_pdu_items.cpp#L5570-L5574", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "intersys_son_cfg_transfer_s::pack", "code": "SRSASN_CODE intersys_son_cfg_transfer_s::pack(bit_ref& bref) const\n{\n bref.pack(ext, 1);\n HANDLE_CODE(bref.pack(ie_exts_present, 1));\n\n HANDLE_CODE(transfer_type.pack(bref));\n HANDLE_CODE(intersys_son_info.pack(bref));\n if (ie_exts_present) {\n HANDLE_CODE(ie_exts.pack(bref));\n }\n\n return SRSASN_SUCCESS;\n}", "docstring": "// IntersystemSONConfigurationTransfer ::= SEQUENCE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/ngap/ngap_ies.cpp#L17882-L17894", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ran_cp_relocation_ind_ies_o::value_c::set", "code": "void ran_cp_relocation_ind_ies_o::value_c::set(types::options e)\n{\n type_ = e;\n switch (type_) {\n case types::ran_ue_ngap_id:\n c = uint64_t{};\n break;\n case types::five_g_s_tmsi:\n c = five_g_s_tmsi_s{};\n break;\n case types::eutra_cgi:\n c = eutra_cgi_s{};\n break;\n case types::tai:\n c = tai_s{};\n break;\n case types::ul_cp_security_info:\n c = ul_cp_security_info_s{};\n break;\n case types::nulltype:\n break;\n default:\n log_invalid_choice_id(type_, \"ran_cp_relocation_ind_ies_o::value_c\");\n }\n}", "docstring": "// Value ::= OPEN TYPE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/ngap/ngap_pdu_contents.cpp#L28248-L28272", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "feature_combination_r17_s::pack", "code": "SRSASN_CODE feature_combination_r17_s::pack(bit_ref& bref) const\n{\n HANDLE_CODE(bref.pack(red_cap_r17_present, 1));\n HANDLE_CODE(bref.pack(small_data_r17_present, 1));\n HANDLE_CODE(bref.pack(nsag_r17.size() > 0, 1));\n HANDLE_CODE(bref.pack(msg3_repeats_r17_present, 1));\n HANDLE_CODE(bref.pack(spare4_present, 1));\n HANDLE_CODE(bref.pack(spare3_present, 1));\n HANDLE_CODE(bref.pack(spare2_present, 1));\n HANDLE_CODE(bref.pack(spare1_present, 1));\n\n if (nsag_r17.size() > 0) {\n HANDLE_CODE(pack_dyn_seq_of(bref, nsag_r17, 1, 8));\n }\n\n 
return SRSASN_SUCCESS;\n}", "docstring": "// FeatureCombination-r17 ::= SEQUENCE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/rrc_nr/bwp_cfg.cpp#L428-L444", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pucch_res_ext_v1610_s::pack", "code": "SRSASN_CODE pucch_res_ext_v1610_s::pack(bit_ref& bref) const\n{\n bref.pack(ext, 1);\n HANDLE_CODE(bref.pack(interlace_alloc_r16_present, 1));\n HANDLE_CODE(bref.pack(format_v1610_present, 1));\n\n if (interlace_alloc_r16_present) {\n HANDLE_CODE(pack_integer(bref, interlace_alloc_r16.rb_set_idx_r16, (uint8_t)0u, (uint8_t)4u));\n HANDLE_CODE(interlace_alloc_r16.interlace0_r16.pack(bref));\n }\n if (format_v1610_present) {\n HANDLE_CODE(format_v1610.pack(bref));\n }\n\n if (ext) {\n ext_groups_packer_guard group_flags;\n group_flags[0] |= format_v1700.is_present();\n group_flags[0] |= pucch_repeat_nrof_slots_r17_present;\n group_flags.pack(bref);\n\n if (group_flags[0]) {\n varlength_field_pack_guard varlen_scope(bref, false);\n\n HANDLE_CODE(bref.pack(format_v1700.is_present(), 1));\n HANDLE_CODE(bref.pack(pucch_repeat_nrof_slots_r17_present, 1));\n if (format_v1700.is_present()) {\n HANDLE_CODE(pack_integer(bref, format_v1700->nrof_prbs_r17, (uint8_t)1u, (uint8_t)16u));\n }\n if (pucch_repeat_nrof_slots_r17_present) {\n HANDLE_CODE(pucch_repeat_nrof_slots_r17.pack(bref));\n }\n }\n }\n return SRSASN_SUCCESS;\n}", "docstring": "// PUCCH-ResourceExt-v1610 ::= SEQUENCE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/rrc_nr/bwp_cfg.cpp#L7530-L7564", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "meas_trigger_quant_offset_c::destroy_", "code": "void meas_trigger_quant_offset_c::destroy_() {}", "docstring": "// MeasTriggerQuantityOffset ::= CHOICE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/rrc_nr/meas_cfg.cpp#L738-L738", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "var_conn_est_fail_report_list_r17_s::pack", "code": "SRSASN_CODE var_conn_est_fail_report_list_r17_s::pack(bit_ref& bref) const\n{\n HANDLE_CODE(pack_dyn_seq_of(bref, conn_est_fail_report_list_r17, 1, 4));\n\n return SRSASN_SUCCESS;\n}", "docstring": "// VarConnEstFailReportList-r17 ::= SEQUENCE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/rrc_nr/nr_ue_variables.cpp#L491-L496", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "dummy_a_s::pack", "code": "SRSASN_CODE dummy_a_s::pack(bit_ref& bref) const\n{\n HANDLE_CODE(pack_integer(bref, max_num_nzp_csi_rs_per_cc, (uint8_t)1u, (uint8_t)32u));\n HANDLE_CODE(max_num_ports_across_nzp_csi_rs_per_cc.pack(bref));\n HANDLE_CODE(max_num_cs_im_per_cc.pack(bref));\n HANDLE_CODE(max_num_simul_csi_rs_act_bwp_all_cc.pack(bref));\n HANDLE_CODE(total_num_ports_simul_csi_rs_act_bwp_all_cc.pack(bref));\n\n return SRSASN_SUCCESS;\n}", "docstring": "// DummyA ::= SEQUENCE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/rrc_nr/ue_cap.cpp#L19179-L19188", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", 
"dataset": "github_2023", "owner": "", "lang": "", "func_name": "reestab_ue_id_s::pack", "code": "SRSASN_CODE reestab_ue_id_s::pack(bit_ref& bref) const\n{\n HANDLE_CODE(pack_integer(bref, c_rnti, (uint32_t)0u, (uint32_t)65535u));\n HANDLE_CODE(pack_integer(bref, pci, (uint16_t)0u, (uint16_t)1007u));\n HANDLE_CODE(short_mac_i.pack(bref));\n\n return SRSASN_SUCCESS;\n}", "docstring": "// ReestabUE-Identity ::= SEQUENCE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/rrc_nr/ul_ccch_msg_ies.cpp#L222-L229", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sl_qos_info_r16_s::pack", "code": "SRSASN_CODE sl_qos_info_r16_s::pack(bit_ref& bref) const\n{\n HANDLE_CODE(bref.pack(sl_qos_profile_r16_present, 1));\n\n HANDLE_CODE(pack_integer(bref, sl_qos_flow_id_r16, (uint16_t)1u, (uint16_t)2048u));\n if (sl_qos_profile_r16_present) {\n HANDLE_CODE(sl_qos_profile_r16.pack(bref));\n }\n\n return SRSASN_SUCCESS;\n}", "docstring": "// SL-QoS-Info-r16 ::= SEQUENCE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/rrc_nr/ul_dcch_msg_ies.cpp#L1761-L1771", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "scg_fail_info_ies_s::pack", "code": "SRSASN_CODE scg_fail_info_ies_s::pack(bit_ref& bref) const\n{\n HANDLE_CODE(bref.pack(fail_report_scg_present, 1));\n HANDLE_CODE(bref.pack(non_crit_ext_present, 1));\n\n if (fail_report_scg_present) {\n HANDLE_CODE(fail_report_scg.pack(bref));\n }\n if (non_crit_ext_present) {\n HANDLE_CODE(non_crit_ext.pack(bref));\n }\n\n return SRSASN_SUCCESS;\n}", "docstring": "// SCGFailureInformation-IEs ::= SEQUENCE", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/asn1/rrc_nr/ul_dcch_msg_ies.cpp#L18424-L18437", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fill_format_2_3_4_harq", "code": "static void fill_format_2_3_4_harq(fapi::uci_pucch_pdu_format_2_3_4_builder& builder, const pucch_uci_message& message)\n{\n units::bits harq_len = units::bits(message.get_expected_nof_harq_ack_bits());\n if (harq_len.value() == 0) {\n return;\n }\n\n uci_pusch_or_pucch_f2_3_4_detection_status status =\n to_fapi_uci_detection_status(message.get_status(), message.get_expected_nof_bits_full_payload());\n\n // Write an empty payload on detection failure.\n if (!is_fapi_uci_payload_valid(status)) {\n builder.set_harq_parameters(status, harq_len.value(), {});\n return;\n }\n\n builder.set_harq_parameters(status,\n harq_len.value(),\n bounded_bitset(message.get_harq_ack_bits().begin(),\n message.get_harq_ack_bits().end()));\n}", "docstring": "/// Fills the HARQ parameters for PUCCH Format 2/3/4 using the given builder and message.", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/lib/fapi_adaptor/phy/phy_to_fapi_results_event_translator.cpp#L429-L449", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TEST", "code": "TEST(validate_dl_tti_request, valid_request_passes)\n{\n dl_tti_request_message msg = build_valid_dl_tti_request();\n\n const auto& result = validate_dl_tti_request(msg);\n\n ASSERT_TRUE(result);\n}", "docstring": "/// Tests that a valid 
DL_TTI.request message validates correctly.", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/tests/unittests/fapi/validators/dl_tti_request_test.cpp#L77-L84", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TEST_F", "code": "TEST_F(DciValidatorFallbackFixture, SupplementaryUplinkNotSupported)\n{\n dci_size_config config = get_base_dci_config();\n config.sul_configured = true;\n std::string assert_message = fmt::format(\"SUL is not currently supported by the DCI size alignment procedure.\");\n\n test_validator(config, assert_message);\n}", "docstring": "// SUL not configured check.", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/tests/unittests/ran/pdcch/dci_packing_validator_test.cpp#L273-L280", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_next_candidate_alloc_slot", "code": "slot_point get_next_candidate_alloc_slot(slot_point sched_slot, unsigned nof_slot_grid_occupancy) const\n {\n if (nof_slot_grid_occupancy == 0) {\n return sched_slot;\n }\n\n unsigned occupy_grid_slot_cnt = 0;\n\n // The allocation must be on a DL slot.\n do {\n sched_slot++;\n if (bench->cell_cfg.is_dl_enabled(sched_slot)) {\n occupy_grid_slot_cnt++;\n }\n } while (occupy_grid_slot_cnt != nof_slot_grid_occupancy);\n\n auto k1_falls_on_ul = [&cfg = bench->cell_cfg](slot_point pdsch_slot) {\n static const std::array<uint8_t, 4> dci_1_0_k1_values = {4, 5, 6, 7};\n return std::any_of(dci_1_0_k1_values.begin(), dci_1_0_k1_values.end(), [&cfg, pdsch_slot](uint8_t k1) {\n return cfg.is_ul_enabled(pdsch_slot + k1);\n });\n };\n\n // Make sure the final slot for the SRB0/SRB1 PDSCH is such that the corresponding PUCCH falls on a UL slot.\n while ((not k1_falls_on_ul(sched_slot)) or (not bench->cell_cfg.is_dl_enabled(sched_slot)) or\n csi_helper::is_csi_rs_slot(bench->cell_cfg, sched_slot)) {\n sched_slot++;\n }\n\n return sched_slot;\n }", "docstring": "// Returns the next candidate slot at which the SRB0 scheduler is expected to allocate a grant.", "url": "https://github.com/srsran/srsRAN_Project/blob/a041e3162d7ea94a7963437f32df372fae5d21ea/tests/unittests/scheduler/ue_scheduling/fallback_scheduler_test.cpp#L748-L778", "sha": "a041e3162d7ea94a7963437f32df372fae5d21ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OCLPerfAtomicSpeed::SetKernelArguments", "code": "void OCLPerfAtomicSpeed::SetKernelArguments(const AtomicType atomicType) {\n int Arg = 0;\n int localSize = 0;\n int itemsPerThread = 1;\n cl_int status = CL_SUCCESS;\n\n switch (atomicType) {\n case LocalHistogram:\n // Set arguments for the local atomics histogram kernel\n status = _wrapper->clSetKernelArg(_kernels[0], Arg++, sizeof(cl_mem),\n (void *)&_inputBuffer);\n CHECK_RESULT(status, \"clSetKernelArg failed. (inputBuffer)\");\n\n status |= _wrapper->clSetKernelArg(_kernels[0], Arg++, sizeof(cl_mem),\n (void *)&_outputBuffer);\n CHECK_RESULT(status, \"clSetKernelArg failed. (outputBuffer)\");\n\n status |= _wrapper->clSetKernelArg(_kernels[0], Arg++,\n sizeof(_n4VectorsPerThread),\n (void *)&_n4VectorsPerThread);\n CHECK_RESULT(status, \"clSetKernelArg failed. 
(n4VectorsPerThread)\");\n\n // Set arguments for the local atomics reduce kernel\n Arg = 0;\n status |= _wrapper->clSetKernelArg(_kernels[1], Arg++, sizeof(cl_mem),\n (void *)&_outputBuffer);\n CHECK_RESULT(status, \"clSetKernelArg failed. (outputBuffer)\");\n\n status |= _wrapper->clSetKernelArg(_kernels[1], Arg++, sizeof(_nGroups),\n (void *)&_nGroups);\n CHECK_RESULT(status, \"clSetKernelArg failed. (nGroups)\");\n break;\n case LocalReductionAtomics:\n case LocalReductionNoAtomics:\n case Local4ReductionNoAtomics:\n case Local4ReductionAtomics:\n status = _wrapper->clSetKernelArg(_kernels[0], Arg++, sizeof(cl_mem),\n (void *)&_inputBuffer);\n CHECK_RESULT(status, \"clSetKernelArg failed. (inputBuffer)\");\n\n status |= _wrapper->clSetKernelArg(_kernels[0], Arg++, sizeof(cl_mem),\n (void *)&_outputBuffer);\n CHECK_RESULT(status, \"clSetKernelArg failed. (outputBuffer)\");\n\n localSize = DEFAULT_WG_SIZE * sizeof(cl_uint);\n if ((Local4ReductionNoAtomics == atomicType) ||\n (Local4ReductionAtomics == atomicType))\n localSize *= 4;\n status = _wrapper->clSetKernelArg(_kernels[0], Arg++, localSize, NULL);\n CHECK_RESULT(status, \"clSetKernelArg failed. (local memory)\");\n break;\n case GlobalHistogram:\n case Global4Histogram:\n case GlobalWGReduction:\n case Global4WGReduction:\n case GlobalAllToZeroReduction:\n case Global4AllToZeroReduction:\n // Set arguments for the global atomics histogram kernel\n if ((Global4Histogram == atomicType) ||\n (Global4WGReduction == atomicType) ||\n (Global4AllToZeroReduction == atomicType))\n itemsPerThread = 4;\n\n status = _wrapper->clSetKernelArg(\n _kernels[0], Arg++, sizeof(itemsPerThread), (void *)&itemsPerThread);\n CHECK_RESULT(status, \"clSetKernelArg failed. (itemsPerThread)\");\n\n status = _wrapper->clSetKernelArg(_kernels[0], Arg++, sizeof(cl_mem),\n (void *)&_inputBuffer);\n CHECK_RESULT(status, \"clSetKernelArg failed. (inputBuffer)\");\n\n status |= _wrapper->clSetKernelArg(_kernels[0], Arg++, sizeof(cl_mem),\n (void *)&_outputBuffer);\n CHECK_RESULT(status, \"clSetKernelArg failed. 
(outputBuffer)\");\n break;\n default:\n CHECK_RESULT(true, \"Atomic type not supported (clSetKernelArg)\");\n }\n}", "docstring": "// Sets the kernel arguments based on the current test type.", "url": "https://github.com/ROCm/clr/blob/a8edb8d467ebb5678d2f4506bd01efd3aaeddcab/opencl/tests/ocltst/module/perf/OCLPerfAtomicSpeed.cpp#L381-L459", "sha": "a8edb8d467ebb5678d2f4506bd01efd3aaeddcab"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ImGui::PushColumnsBackground", "code": "void ImGui::PushColumnsBackground()\n{\n ImGuiWindow* window = GetCurrentWindowRead();\n ImGuiOldColumns* columns = window->DC.CurrentColumns;\n if (columns->Count == 1)\n return;\n\n // Optimization: avoid SetCurrentChannel() + PushClipRect()\n columns->HostBackupClipRect = window->ClipRect;\n SetWindowClipRectBeforeSetChannel(window, columns->HostInitialClipRect);\n columns->Splitter.SetCurrentChannel(window->DrawList, 0);\n}", "docstring": "// Get into the columns background draw command (which is generally the same draw command as before we called BeginColumns)", "url": "https://github.com/fedes1to/Zygisk-ImGui-Menu/blob/faffb17a4ba56d2d208cd25b912260c79cd99089/module/src/main/cpp/ImGui/imgui_tables.cpp#L4071-L4082", "sha": "faffb17a4ba56d2d208cd25b912260c79cd99089"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sd2Card::readEnd", "code": "void Sd2Card::readEnd(void) {\n if (inBlock_) {\n // skip data and crc\n #ifdef OPTIMIZE_HARDWARE_SPI\n // optimize skip for hardware\n SPDR = 0XFF;\n while (offset_++ < 513) {\n while (!(SPSR & (1 << SPIF)))\n ;\n SPDR = 0XFF;\n }\n // wait for last crc byte\n while (!(SPSR & (1 << SPIF)))\n ;\n #else // OPTIMIZE_HARDWARE_SPI\n while (offset_++ < 514) {\n spiRec();\n }\n #endif // OPTIMIZE_HARDWARE_SPI\n chipSelectHigh();\n inBlock_ = 0;\n }\n}", "docstring": "//------------------------------------------------------------------------------\n/** Skip remaining data in a block when in partial block read mode. */", "url": "https://github.com/MIT-Senseable-City-Lab/flatburn/blob/dc5bdbfeb53ac85cb26f27325326cbd7c868c9a3/Build/Firmware/lib/sdcard/src/utility/Sd2Card.cpp#L475-L497", "sha": "dc5bdbfeb53ac85cb26f27325326cbd7c868c9a3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "main", "code": "int main(int argc, char *argv[])\n{\n llvm::InitLLVM init_llvm(argc, argv);\n\n // Initialize passes. 
See llvm/tools/opt/opt.cpp\n llvm::PassRegistry &registry = *llvm::PassRegistry::getPassRegistry();\n llvm::initializeCore(registry);\n llvm::initializeAnalysis(registry);\n llvm::initializeTransformUtils(registry);\n\n cl::ParseCommandLineOptions(argc, argv, \"Nanotube back end\\n\");\n\n llvm::LLVMContext context;\n llvm::SMDiagnostic sm_diag;\n\n llvm::legacy::PassManager pm;\n llvm::Error err = add_passes(pm);\n if (err) {\n llvm::errs() << argv[0] << \": \" << toString(std::move(err)) << \"\\n\";\n return 1;\n }\n\n std::unique_ptr<llvm::Module> module;\n module = parseIRFile(opt_input_filename, sm_diag, context, false);\n if (!module) {\n sm_diag.print(argv[0], llvm::WithColor::error(llvm::errs(), argv[0]));\n return 1;\n }\n\n Triple the_triple = Triple(module->getTargetTriple());\n std::cout << \"Target triple: \" << the_triple.getTriple() << \"\\n\";\n\n pm.add(create_hls_printer(opt_output_directory, opt_overwrite));\n\n pm.run(*module);\n\n return 0;\n}", "docstring": "///////////////////////////////////////////////////////////////////////////\n// The main program.", "url": "https://github.com/Xilinx/nanotube/blob/9ff2e920b02eb1b7df55811cf8b25f1dd410f61c/back_end/back_end_main.cpp#L110-L147", "sha": "9ff2e920b02eb1b7df55811cf8b25f1dd410f61c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RISAM2::dot", "code": "void RISAM2::dot(std::ostream& s, sharedClique clique, const KeyFormatter& keyFormatter, int parentnum) const {\n static int num = 0;\n bool first = true;\n std::stringstream out;\n out << num;\n std::string parent = out.str();\n parent += \"[label=\\\"\";\n\n for (Key key : clique->conditional_->frontals()) {\n if (!first) parent += \", \";\n first = false;\n parent += keyFormatter(key);\n }\n\n if (clique->parent()) {\n parent += \" : \";\n s << parentnum << \"->\" << num << \"\\n\";\n }\n\n first = true;\n for (Key parentKey : clique->conditional_->parents()) {\n if (!first) parent += \", \";\n first = false;\n parent += keyFormatter(parentKey);\n }\n\n parent += \"\\\"\";\n bool update = false;\n bool convex = false;\n for (Key key : clique->conditional_->frontals()) {\n if (last_update_info_.updateInvolvedKeys.find(key) != last_update_info_.updateInvolvedKeys.end()) update = true;\n if (last_update_info_.affectedKeysConvex.find(key) != last_update_info_.affectedKeysConvex.end()) convex = true;\n }\n for (Key key : clique->conditional_->parents()) {\n if (last_update_info_.updateInvolvedKeys.find(key) != last_update_info_.updateInvolvedKeys.end()) update = true;\n if (last_update_info_.affectedKeysConvex.find(key) != last_update_info_.affectedKeysConvex.end()) convex = true;\n }\n if (update) {\n parent += \", color=firebrick\";\n } else if (convex) {\n parent += \", color=dodgerblue2\";\n }\n parent += \"];\\n\";\n\n s << parent;\n parentnum = num;\n\n for (sharedClique c : clique->children) {\n num++;\n dot(s, c, keyFormatter, parentnum);\n }\n}", "docstring": "/* ************************************************************************* */", "url": "https://github.com/rpl-cmu/risam/blob/a6f9b4459e6820f81f6d2d113d57d43638ae32a3/risam/src/RISAM2.cpp#L769-L820", "sha": "a6f9b4459e6820f81f6d2d113d57d43638ae32a3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "main", "code": "int main() {\n DynamicJsonDocument doc(1024);\n doc[\"dummy\"].as<char>();\n}", "docstring": "// See issue #1498", "url": 
"https://github.com/lnbits/nostr-signing-device/blob/1956e5933b3da5e49d30db9b8597497b157a5cbc/libraries/ArduinoJson/extras/tests/FailingBuilds/variant_as_char.cpp#L9-L12", "sha": "1956e5933b3da5e49d30db9b8597497b157a5cbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TFT_eSPI::begin_tft_write", "code": "inline void TFT_eSPI::begin_tft_write(void){\n if (locked) {\n locked = false; // Flag to show SPI access now unlocked\n#if defined (SPI_HAS_TRANSACTION) && defined (SUPPORT_TRANSACTIONS) && !defined(TFT_PARALLEL_8_BIT) && !defined(RP2040_PIO_INTERFACE)\n spi.beginTransaction(SPISettings(SPI_FREQUENCY, MSBFIRST, TFT_SPI_MODE));\n#endif\n CS_L;\n SET_BUS_WRITE_MODE; // Some processors (e.g. ESP32) allow recycling the tx buffer when rx is not used\n }\n}", "docstring": "/***************************************************************************************\n** Function name: begin_tft_write (was called spi_begin)\n** Description: Start SPI transaction for writes and select TFT\n***************************************************************************************/", "url": "https://github.com/lnbits/nostr-signing-device/blob/1956e5933b3da5e49d30db9b8597497b157a5cbc/libraries/TFT_eSPI/TFT_eSPI.cpp#L74-L83", "sha": "1956e5933b3da5e49d30db9b8597497b157a5cbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Cipher", "code": "static void Cipher(state_t* state, const uint8_t* RoundKey)\n{\n uint8_t round = 0;\n\n // Add the First round key to the state before starting the rounds.\n AddRoundKey(0, state, RoundKey);\n\n // There will be Nr rounds.\n // The first Nr-1 rounds are identical.\n // These Nr rounds are executed in the loop below.\n // Last one without MixColumns()\n for (round = 1; ; ++round)\n {\n SubBytes(state);\n ShiftRows(state);\n if (round == Nr) {\n break;\n }\n MixColumns(state);\n AddRoundKey(round, state, RoundKey);\n }\n // Add round key to last round\n AddRoundKey(Nr, state, RoundKey);\n}", "docstring": "// #if (defined(CBC) && CBC == 1) || (defined(ECB) && ECB == 1)\n// Cipher is the main function that encrypts the PlainText.", "url": "https://github.com/lnbits/nostr-signing-device/blob/1956e5933b3da5e49d30db9b8597497b157a5cbc/libraries/tiny-AES-c/aes.cpp#L413-L436", "sha": "1956e5933b3da5e49d30db9b8597497b157a5cbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "dequantize_mul_mat_vec_q5_k", "code": "static void dequantize_mul_mat_vec_q5_k(const void *__restrict__ vx,\n const float *__restrict__ yy,\n float *__restrict__ dst,\n const int ncols,\n const sycl::nd_item<3> &item_ct1) {\n\n const int row = item_ct1.get_group(2);\n const int num_blocks_per_row = ncols / QK_K;\n const int ib0 = row*num_blocks_per_row;\n\n const block_q5_K * x = (const block_q5_K *)vx + ib0;\n\n float tmp = 0; // partial sum for thread in warp\n\n#if QK_K == 256\n const uint16_t kmask1 = 0x3f3f;\n const uint16_t kmask2 = 0x0f0f;\n const uint16_t kmask3 = 0xc0c0;\n\n const int tid = item_ct1.get_local_id(2) / 2; // 0...15\n const int ix = item_ct1.get_local_id(2) % 2;\n\n const int il = tid/4; // 0...3\n const int ir = tid - 4*il;// 0...3\n const int n = 2;\n\n const int im = il/2; // 0 or 1. 
0 computes 0,32 + 128,160, 1 computes 64,96 + 192,224\n const int in = il%2;\n\n const int l0 = n*(2*ir + in);\n const int q_offset = 32*im + l0;\n const int y_offset = 64*im + l0;\n\n const uint8_t hm1 = 1 << (2*im);\n const uint8_t hm2 = hm1 << 4;\n\n uint16_t aux[4];\n const uint8_t * sc = (const uint8_t *)aux;\n\n uint16_t q16[8];\n const uint8_t * q4 = (const uint8_t *)q16;\n\n for (int i = ix; i < num_blocks_per_row; i += 2) {\n\n const uint8_t * ql1 = x[i].qs + q_offset;\n const uint8_t * qh = x[i].qh + l0;\n const float * y1 = yy + i*QK_K + y_offset;\n const float * y2 = y1 + 128;\n\n const float dall = x[i].dm[0];\n const float dmin = x[i].dm[1];\n\n const uint16_t * a = (const uint16_t *)x[i].scales;\n aux[0] = a[im+0] & kmask1;\n aux[1] = a[im+2] & kmask1;\n aux[2] = ((a[im+4] >> 0) & kmask2) | ((a[im+0] & kmask3) >> 2);\n aux[3] = ((a[im+4] >> 4) & kmask2) | ((a[im+2] & kmask3) >> 2);\n\n sycl::float4 sum = {0.f, 0.f, 0.f, 0.f};\n float smin = 0;\n const uint16_t * q1 = (const uint16_t *)ql1;\n const uint16_t * q2 = q1 + 32;\n q16[0] = q1[0] & 0x0f0f;\n q16[1] = q1[8] & 0x0f0f;\n q16[2] = (q1[0] >> 4) & 0x0f0f;\n q16[3] = (q1[8] >> 4) & 0x0f0f;\n q16[4] = q2[0] & 0x0f0f;\n q16[5] = q2[8] & 0x0f0f;\n q16[6] = (q2[0] >> 4) & 0x0f0f;\n q16[7] = (q2[8] >> 4) & 0x0f0f;\n for (int l = 0; l < n; ++l) {\n sum.x() +=\n y1[l + 0] * (q4[l + 0] + (qh[l + 0] & (hm1 << 0) ? 16 : 0)) +\n y1[l + 16] * (q4[l + 2] + (qh[l + 16] & (hm1 << 0) ? 16 : 0));\n sum.y() +=\n y1[l + 32] * (q4[l + 4] + (qh[l + 0] & (hm1 << 1) ? 16 : 0)) +\n y1[l + 48] * (q4[l + 6] + (qh[l + 16] & (hm1 << 1) ? 16 : 0));\n sum.z() +=\n y2[l + 0] * (q4[l + 8] + (qh[l + 0] & (hm2 << 0) ? 16 : 0)) +\n y2[l + 16] * (q4[l + 10] + (qh[l + 16] & (hm2 << 0) ? 16 : 0));\n sum.w() +=\n y2[l + 32] * (q4[l + 12] + (qh[l + 0] & (hm2 << 1) ? 16 : 0)) +\n y2[l + 48] * (q4[l + 14] + (qh[l + 16] & (hm2 << 1) ? 16 : 0));\n smin += (y1[l] + y1[l+16]) * sc[2] + (y1[l+32] + y1[l+48]) * sc[3]\n + (y2[l] + y2[l+16]) * sc[6] + (y2[l+32] + y2[l+48]) * sc[7];\n }\n tmp += dall * (sum.x() * sc[0] + sum.y() * sc[1] + sum.z() * sc[4] +\n sum.w() * sc[5]) -\n dmin * smin;\n }\n\n#else\n const int tid = item_ct1.get_local_id(2)/(2*K_QUANTS_PER_ITERATION); // 0...15\n const int ix = item_ct1.get_local_id(2)%(2*K_QUANTS_PER_ITERATION);\n const int step = tid * K_QUANTS_PER_ITERATION;\n const int im = step/8;\n const int in = step%8;\n\n for (int i = ix; i < num_blocks_per_row; i += 2*K_QUANTS_PER_ITERATION) {\n const uint8_t * q = x[i].qs + step;\n const int8_t * s = x[i].scales;\n const float * y = yy + i*QK_K + step;\n const float d = x[i].d;\n float sum = 0.f;\n for (int j = 0; j < K_QUANTS_PER_ITERATION; ++j) {\n const uint8_t h = x[i].qh[in+j] >> im;\n sum += y[j+ 0] * d * s[0] * ((q[j+ 0] & 0xF) - ((h >> 0) & 1 ? 0 : 16))\n + y[j+16] * d * s[1] * ((q[j+16] & 0xF) - ((h >> 2) & 1 ? 0 : 16))\n + y[j+32] * d * s[2] * ((q[j+ 0] >> 4) - ((h >> 4) & 1 ? 0 : 16))\n + y[j+48] * d * s[3] * ((q[j+16] >> 4) - ((h >> 6) & 1 ? 0 : 16));\n }\n tmp += sum;\n }\n#endif\n\n // sum up partial sums and write back result\n#pragma unroll\n for (int mask = QK_WARP_SIZE / 2; mask > 0; mask >>= 1) {\n tmp +=\n dpct::permute_sub_group_by_xor(item_ct1.get_sub_group(), tmp, mask);\n }\n\n if (item_ct1.get_local_id(2) == 0) {\n dst[row] = tmp;\n }\n}", "docstring": "/*\nDPCT1110:7: The total declared local variable size in device function\ndequantize_mul_mat_vec_q5_k exceeds 128 bytes and may cause high register\npressure. 
Consult with your hardware vendor to find the total register size\navailable and adjust the code, or use smaller sub-group size to avoid high\nregister pressure.\n*/", "url": "https://github.com/ggerganov/llama.cpp/blob/4078c77f9891831f29ffc7c315c8ec6695ba5ce7/ggml/src/ggml-sycl/dmmv.cpp#L520-L645", "sha": "4078c77f9891831f29ffc7c315c8ec6695ba5ce7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "vars", "code": "std::string vars() override {\n return VARS_TO_STR6(type, n, m, r, b, v);\n }", "docstring": "// view (non-contiguous src1)", "url": "https://github.com/ggerganov/llama.cpp/blob/4078c77f9891831f29ffc7c315c8ec6695ba5ce7/tests/test-backend-ops.cpp#L1086-L1088", "sha": "4078c77f9891831f29ffc7c315c8ec6695ba5ce7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DriveWidget::leaveEvent", "code": "void DriveWidget::leaveEvent(QEvent *event)\n{\n stop();\n}", "docstring": "// When the mouse leaves the widget but the button is still held down,\n// we don't get the leaveEvent() because the mouse is \"grabbed\" (by\n// default from Qt). However, when the mouse drags out of the widget\n// and then other buttons are pressed (or possibly other\n// window-manager things happen), we will get a leaveEvent() but not a\n// mouseReleaseEvent(). Without catching this event you can have a\n// robot stuck \"on\" without the user controlling it.", "url": "https://github.com/L5Player/AutoDriving-Planning-Control-Algorithm-Simulation-Carla/blob/cf02ec2e9d7df21f2a021a11880e382d654de5d8/src/ros-bridge/rviz_carla_plugin/src/drive_widget.cpp#L209-L212", "sha": "cf02ec2e9d7df21f2a021a11880e382d654de5d8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "detexDecompressBlockBPTC_SIGNED_FLOAT", "code": "bool detexDecompressBlockBPTC_SIGNED_FLOAT(const uint8_t * DETEX_RESTRICT bitstring,\nuint32_t mode_mask, uint32_t flags, uint8_t * DETEX_RESTRICT pixel_buffer) {\n\treturn DecompressBlockBPTCFloatShared(bitstring, mode_mask, flags, true,\n\t\tpixel_buffer);\n}", "docstring": "/* Decompress a 128-bit 4x4 pixel texture block compressed using the */\n/* BPTC_FLOAT (BC6H_FLOAT) format. The output format is */\n/* DETEX_PIXEL_FORMAT_SIGNED_FLOAT_RGBX16. 
*/", "url": "https://github.com/JsonAsAsset/JsonAsAsset/blob/75767cc02d79923d7411546856e75e956da46d47/Source/Detex/ThirdParty/detex/decompress-bptc-float.cpp#L640-L644", "sha": "75767cc02d79923d7411546856e75e956da46d47"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BlockATI1::flip4", "code": "void BlockATI1::flip4()\n{\n\talpha.flip4();\n}", "docstring": "/// Flip ATI1 block vertically.", "url": "https://github.com/JsonAsAsset/JsonAsAsset/blob/75767cc02d79923d7411546856e75e956da46d47/Source/NVTT/ThirdParty/nvtt/nvimage/BlockDXT.cpp#L504-L507", "sha": "75767cc02d79923d7411546856e75e956da46d47"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TileWorkQueue::clampToPass", "code": "void\nTileWorkQueue::clampToPass(unsigned passIdx)\n{\n MNRY_ASSERT(mNumPasses);\n passIdx = std::min(passIdx, mNumPasses - 1u);\n mGroupClampIdx = mPassInfos[passIdx].mEndGroupIdx;\n}", "docstring": "// Executes the up until and including this pass and then stops.", "url": "https://github.com/dreamworksanimation/moonray/blob/540f3e757ec334bf68a0a47222ad204ed2e3de04/lib/rendering/rndr/TileWorkQueue.cc#L165-L171", "sha": "540f3e757ec334bf68a0a47222ad204ed2e3de04"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CharIterator::CharIterator", "code": "CharIterator::CharIterator ()\n{\n it = NULL;\n}", "docstring": "// -----------------\n// CharIterator\n// -----------------", "url": "https://github.com/crossbowerbt/dillo-plus/blob/7d093e6bddcb3338938ea5959844e62ff1f9b76f/dw/iterator.cc#L736-L739", "sha": "7d093e6bddcb3338938ea5959844e62ff1f9b76f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "a_Menu_bugmeter_popup", "code": "void a_Menu_bugmeter_popup(BrowserWindow *bw, const DilloUrl *url)\n{\n static Fl_Menu_Item pm[] = {\n {\"Validate URL with W3C\", 0, Menu_bugmeter_validate_w3c_cb,0,0,0,0,0,0},\n {\"Validate URL with WDG\", 0, Menu_bugmeter_validate_wdg_cb, 0,\n FL_MENU_DIVIDER,0,0,0,0},\n {\"About bug meter\", 0, Menu_bugmeter_about_cb,0,0,0,0,0,0},\n {0,0,0,0,0,0,0,0,0}\n };\n\n popup_x = Fl::event_x();\n popup_y = Fl::event_y();\n popup_bw = bw;\n a_Url_free(popup_url);\n popup_url = a_Url_dup(url);\n\n a_Timeout_add(0.0, Menu_simple_popup_cb, (void*)pm);\n}", "docstring": "/*\n * Bugmeter popup menu (construction & popup)\n */", "url": "https://github.com/crossbowerbt/dillo-plus/blob/7d093e6bddcb3338938ea5959844e62ff1f9b76f/src/menu.cc#L614-L631", "sha": "7d093e6bddcb3338938ea5959844e62ff1f9b76f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ClipperBase::PopScanbeam", "code": "bool ClipperBase::PopScanbeam(cInt &Y) {\n if (m_Scanbeam.empty())\n return false;\n Y = m_Scanbeam.top();\n m_Scanbeam.pop();\n while (!m_Scanbeam.empty() && Y == m_Scanbeam.top()) {\n m_Scanbeam.pop();\n } // Pop duplicates.\n return true;\n}", "docstring": "//------------------------------------------------------------------------------", "url": "https://github.com/TalkUHulk/ai.deploy.box/blob/f937195eab6de38078d1524dae598fd5f142c8c8/source/utility/clipper.cpp#L1283-L1292", "sha": "f937195eab6de38078d1524dae598fd5f142c8c8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "insertAtTail", "code": "void insertAtTail(Node* &tail, int d) {\n Node* temp = new Node(d);\n tail->next = temp;\n tail = temp;\n}", "docstring": "// inserting new node at tail or ending", "url": 
"https://github.com/kishanrajput23/Love-Babbar-CPP-DSA-Course/blob/1876e75344fe102245dd18802d2c9540e70f5525/Lectures/Lecture_44/Lecture_Codes/Singly_Linked_List/05_deleting_node.cpp#L34-L38", "sha": "1876e75344fe102245dd18802d2c9540e70f5525"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "main", "code": "int main(int argc, char** argv)\n{\n std::vector inputFiles;\n std::vector outputDirOrFiles;\n std::string whiteListFile;\n int opts;\n int verbosity;\n\n // handle errors by exiting\n spv::spirvbin_t::registerErrorHandler(errHandler);\n\n // Log messages to std::cout\n spv::spirvbin_t::registerLogHandler(logHandler);\n\n if (argc < 2)\n usage(argv[0]);\n\n parseCmdLine(argc, argv, inputFiles, outputDirOrFiles, whiteListFile, opts, verbosity);\n\n if (outputDirOrFiles.empty())\n usage(argv[0], \"Output directory or file(s) required.\");\n\n const bool isMultiInput = inputFiles.size() > 1;\n const bool isMultiOutput = outputDirOrFiles.size() > 1;\n const bool isSingleOutputDir = !isMultiOutput && std::filesystem::is_directory(outputDirOrFiles[0]);\n\n if (isMultiInput && !isMultiOutput && !isSingleOutputDir)\n usage(argv[0], \"Output is not a directory.\");\n\n\n if (isMultiInput && isMultiOutput && (outputDirOrFiles.size() != inputFiles.size()))\n usage(argv[0], \"Output must be either a single directory or one output file per input.\");\n\n // Main operations: read, remap, and write.\n execute(inputFiles, outputDirOrFiles, isSingleOutputDir, whiteListFile, opts, verbosity);\n\n // If we get here, everything went OK! Nothing more to be done.\n}", "docstring": "// namespace", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/libraries/ZVulkan/src/glslang/StandAlone/spirv-remap.cpp#L365-L402", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TShader::setHlslIoMapping", "code": "void TShader::setHlslIoMapping(bool hlslIoMap) { intermediate->setHlslIoMapping(hlslIoMap); }", "docstring": "// See comment above TDefaultHlslIoMapper in iomapper.cpp:", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/libraries/ZVulkan/src/glslang/glslang/MachineIndependent/ShaderLang.cpp#L1854-L1854", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TIntermediate::addUsedLocation", "code": "int TIntermediate::addUsedLocation(const TQualifier& qualifier, const TType& type, bool& typeCollision)\n{\n typeCollision = false;\n\n int set;\n if (qualifier.isPipeInput())\n set = 0;\n else if (qualifier.isPipeOutput())\n set = 1;\n else if (qualifier.storage == EvqUniform)\n set = 2;\n else if (qualifier.storage == EvqBuffer)\n set = 3;\n else if (qualifier.storage == EvqTileImageEXT)\n set = 4;\n else if (qualifier.isAnyPayload())\n set = 0;\n else if (qualifier.isAnyCallable())\n set = 1;\n else if (qualifier.isHitObjectAttrNV())\n set = 2;\n else\n return -1;\n\n int size;\n if (qualifier.isAnyPayload() || qualifier.isAnyCallable()) {\n size = 1;\n } else if (qualifier.isUniformOrBuffer() || qualifier.isTaskMemory()) {\n if (type.isSizedArray())\n size = type.getCumulativeArraySize();\n else\n size = 1;\n } else {\n // Strip off the outer array dimension for those having an extra one.\n if (type.isArray() && qualifier.isArrayedIo(language)) {\n TType elementType(type, 0);\n size = computeTypeLocationSize(elementType, language);\n } else\n 
size = computeTypeLocationSize(type, language);\n }\n\n // Locations, and components within locations.\n //\n // Almost always, dealing with components means a single location is involved.\n // The exception is a dvec3. From the spec:\n //\n // \"A dvec3 will consume all four components of the first location and components 0 and 1 of\n // the second location. This leaves components 2 and 3 available for other component-qualified\n // declarations.\"\n //\n // That means, without ever mentioning a component, a component range\n // for a different location gets specified, if it's not a vertex shader input. (!)\n // (A vertex shader input will show using only one location, even for a dvec3/4.)\n //\n // So, for the case of dvec3, we need two independent ioRanges.\n //\n // For raytracing IO (payloads and callabledata) each declaration occupies a single\n // slot irrespective of type.\n int collision = -1; // no collision\n if (qualifier.isAnyPayload() || qualifier.isAnyCallable() || qualifier.isHitObjectAttrNV()) {\n TRange range(qualifier.layoutLocation, qualifier.layoutLocation);\n collision = checkLocationRT(set, qualifier.layoutLocation);\n if (collision < 0)\n usedIoRT[set].push_back(range);\n return collision;\n }\n if (size == 2 && type.getBasicType() == EbtDouble && type.getVectorSize() == 3 &&\n (qualifier.isPipeInput() || qualifier.isPipeOutput())) {\n // Dealing with dvec3 in/out split across two locations.\n // Need two io-ranges.\n // The case where the dvec3 doesn't start at component 0 was previously caught as overflow.\n\n // First range:\n TRange locationRange(qualifier.layoutLocation, qualifier.layoutLocation);\n TRange componentRange(0, 3);\n TIoRange range(locationRange, componentRange, type.getBasicType(), 0, qualifier.centroid, qualifier.smooth, qualifier.flat);\n\n // check for collisions\n collision = checkLocationRange(set, range, type, typeCollision);\n if (collision < 0) {\n usedIo[set].push_back(range);\n\n // Second range:\n TRange locationRange2(qualifier.layoutLocation + 1, qualifier.layoutLocation + 1);\n TRange componentRange2(0, 1);\n TIoRange range2(locationRange2, componentRange2, type.getBasicType(), 0, qualifier.centroid, qualifier.smooth, qualifier.flat);\n\n // check for collisions\n collision = checkLocationRange(set, range2, type, typeCollision);\n if (collision < 0)\n usedIo[set].push_back(range2);\n }\n return collision;\n }\n\n // Not a dvec3 in/out split across two locations, generic path.\n // Need a single IO-range block.\n\n TRange locationRange(qualifier.layoutLocation, qualifier.layoutLocation + size - 1);\n TRange componentRange(0, 3);\n if (qualifier.hasComponent() || type.getVectorSize() > 0) {\n int consumedComponents = type.getVectorSize() * (type.getBasicType() == EbtDouble ? 2 : 1);\n if (qualifier.hasComponent())\n componentRange.start = qualifier.layoutComponent;\n componentRange.last = componentRange.start + consumedComponents - 1;\n }\n\n // combine location and component ranges\n TBasicType basicTy = type.getBasicType();\n if (basicTy == EbtSampler && type.getSampler().isAttachmentEXT())\n basicTy = type.getSampler().type;\n TIoRange range(locationRange, componentRange, basicTy, qualifier.hasIndex() ? qualifier.getIndex() : 0, qualifier.centroid, qualifier.smooth, qualifier.flat);\n\n // check for collisions, except for vertex inputs on desktop targeting OpenGL\n if (! 
(!isEsProfile() && language == EShLangVertex && qualifier.isPipeInput()) || spvVersion.vulkan > 0)\n collision = checkLocationRange(set, range, type, typeCollision);\n\n if (collision < 0)\n usedIo[set].push_back(range);\n\n return collision;\n}", "docstring": "// Accumulate locations used for inputs, outputs, and uniforms, payload, callable data, and tileImageEXT\n// and check for collisions as the accumulation is done.\n//\n// Returns < 0 if no collision, >= 0 if collision and the value returned is a colliding value.\n//\n// typeCollision is set to true if there is no direct collision, but the types in the same location\n// are different.\n//", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/libraries/ZVulkan/src/glslang/glslang/MachineIndependent/linkValidate.cpp#L1617-L1738", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SetupGenMidi", "code": "static void SetupGenMidi()\n{\n\t// The OPL renderer should not care about where this comes from.\n\t// Note: No I_Error here - this needs to be consistent with the rest of the music code.\n\tauto lump = fileSystem.CheckNumForName(\"GENMIDI\", ns_global);\n\tif (lump < 0)\n\t{\n\t\tPrintf(\"No GENMIDI lump found. OPL playback not available.\\n\");\n\t\treturn;\n\t}\n\tauto genmidi = fileSystem.ReadFile(lump);\n\n\tif (genmidi.size() < 8 + 175 * 36 || memcmp(genmidi.data(), \"#OPL_II#\", 8)) return;\n\tZMusic_SetGenMidi(genmidi.bytes() + 8);\n}", "docstring": "//==========================================================================\n//\n// Pass some basic working data to the music backend\n// We do this once at startup for everything available.\n//\n//==========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/common/audio/music/i_music.cpp#L173-L187", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SoundEngine::UnlinkChannel", "code": "void SoundEngine::UnlinkChannel(FSoundChan *chan)\n{\n\t*(chan->PrevChan) = chan->NextChan;\n\tif (chan->NextChan != NULL)\n\t{\n\t\tchan->NextChan->PrevChan = chan->PrevChan;\n\t}\n}", "docstring": "//==========================================================================\n//\n// S_UnlinkChannel\n//\n//==========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/common/audio/sound/s_sound.cpp#L252-L259", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "InterplayDecoder::DecodeBlock7", "code": "void InterplayDecoder::DecodeBlock7(int32_t offset)\n{\n uint8_t* pBuffer = GetCurrentFrame() + (intptr_t)offset;\n uint32_t flags = 0;\n\n uint8_t P[2];\n P[0] = *ChunkPtr++;\n P[1] = *ChunkPtr++;\n\n // 2-color encoding\n if (P[0] <= P[1])\n {\n // need 8 more bytes from the stream\n for (int y = 0; y < 8; y++)\n {\n flags = (*ChunkPtr++) | 0x100;\n for (; flags != 1; flags >>= 1) {\n *pBuffer++ = P[flags & 1];\n }\n pBuffer += (videoStride - 8);\n }\n }\n else\n {\n // need 2 more bytes from the stream\n flags = LE_16(ChunkPtr);\n ChunkPtr += 2;\n\n for (int y = 0; y < 8; y += 2)\n {\n for (int x = 0; x < 8; x += 2, flags >>= 1)\n {\n pBuffer[x] =\n pBuffer[x + 1] =\n pBuffer[x + videoStride] =\n 
pBuffer[x + 1 + videoStride] = P[flags & 1];\n }\n pBuffer += videoStride * 2;\n }\n }\n}", "docstring": "// Block6 is unknown and skipped", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/common/cutscenes/playmve.cpp#L822-L862", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Atomic", "code": "static void Atomic()\n{\n\t// Flip current white\n\tCurrentWhite = OtherWhite();\n\tSweepPos = &Root;\n\tState = GCS_Sweep;\n\tEstimate = AllocBytes;\n}", "docstring": "//==========================================================================\n//\n// Atomic\n//\n// If there were any propagations that needed to be done atomicly, they\n// would go here. It also sets things up for the sweep state.\n//\n//==========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/common/objects/dobjgc.cpp#L419-L426", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FRawMouse::~FRawMouse", "code": "FRawMouse::~FRawMouse()\n{\n\tUngrab();\n}", "docstring": "//==========================================================================\n//\n// FRawMouse - Destructor\n//\n//==========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/common/platform/win32/i_mouse.cpp#L491-L494", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FDInputMouse::Ungrab", "code": "void FDInputMouse::Ungrab()\n{\n\tDevice->Unacquire();\n\tGrabbed = false;\n\tSetCursorState(true);\n\tClearButtonState();\n}", "docstring": "//==========================================================================\n//\n// FDInputMouse :: Ungrab\n//\n//==========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/common/platform/win32/i_mouse.cpp#L862-L868", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FRawPS2Manager::GetDevices", "code": "void FRawPS2Manager::GetDevices(TArray &sticks)\n{\n\tfor (unsigned i = 0; i < Devices.Size(); ++i)\n\t{\n\t\tif (Devices[i]->IsConnected())\n\t\t{\n\t\t\tsticks.Push(Devices[i]);\n\t\t}\n\t}\n}", "docstring": "//===========================================================================\n//\n// FRawPS2Manager :: GetJoysticks\n//\n// Adds the IJoystick interfaces for each device we created to the sticks\n// array, if they are detected as connected.\n//\n//===========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/common/platform/win32/i_rawps2.cpp#L979-L988", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FxBoolCast::Emit", "code": "ExpEmit FxBoolCast::Emit(VMFunctionBuilder *build)\n{\n\tExpEmit from = basex->Emit(build);\n\t\n\tif(from.Konst && from.RegType == REGT_INT)\n\t{ // this is needed here because the int const assign optimization returns a constant\n\t\tExpEmit to;\n\t\tto.Konst = true;\n\t\tto.RegType = 
REGT_INT;\n\t\tto.RegNum = build->GetConstantInt(!!build->FindConstantInt(from.RegNum));\n\t\treturn to;\n\t}\n\t\n\n\tassert(!from.Konst);\n\tassert(basex->ValueType->GetRegType() == REGT_INT || basex->ValueType->GetRegType() == REGT_FLOAT || basex->ValueType->GetRegType() == REGT_POINTER);\n\n\tif (NeedValue)\n\t{\n\t\tExpEmit to(build, REGT_INT);\n\t\tfrom.Free(build);\n\t\tbuild->Emit(OP_CASTB, to.RegNum, from.RegNum, from.RegType == REGT_INT ? CASTB_I : from.RegType == REGT_FLOAT ? CASTB_F : CASTB_A);\n\t\treturn to;\n\t}\n\telse\n\t{\n\t\treturn from;\n\t}\n}", "docstring": "//==========================================================================\n//\n//\n//\n//==========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/common/scripting/backend/codegen.cpp#L926-L954", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DStatusBarCore::StatusbarToRealCoords", "code": "void DStatusBarCore::StatusbarToRealCoords(double& x, double& y, double& w, double& h) const\n{\n\tif (SBarScale.X == -1 || ForcedScale)\n\t{\n\t\tint hres = HorizontalResolution;\n\t\tint vres = VerticalResolution;\n\t\tValidateResolution(hres, vres);\n\n\t\tVirtualToRealCoords(twod, x, y, w, h, hres, vres, true, true);\n\t}\n\telse\n\t{\n\t\tx = ST_X + x * SBarScale.X;\n\t\ty = ST_Y + y * SBarScale.Y;\n\t\tw *= SBarScale.X;\n\t\th *= SBarScale.Y;\n\t}\n}", "docstring": "//============================================================================\n//\n// draw stuff\n//\n//============================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/common/statusbar/base_sbar.cpp#L447-L464", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MD5Context::Final", "code": "void MD5Context::Final(uint8_t digest[16])\n{\n\tint count = bytes[0] & 0x3f;\t/* Number of bytes in ctx->in */\n\tuint8_t *p = (uint8_t *)in + count;\n\n\t/* Set the first char of padding to 0x80. There is always room. 
*/\n\t*p++ = 0x80;\n\n\t/* Bytes of padding needed to make 56 bytes (-8..55) */\n\tcount = 56 - 1 - count;\n\n\tif (count < 0)\t\t/* Padding forces an extra block */\n\t{\n\t\tmemset(p, 0, count + 8);\n\t\tbyteSwap(in, 16);\n\t\tMD5Transform(buf, in);\n\t\tp = (uint8_t *)in;\n\t\tcount = 56;\n\t}\n\tmemset(p, 0, count);\n\tbyteSwap(in, 14);\n\n\t/* Append length in bits and transform */\n\tin[14] = bytes[0] << 3;\n\tin[15] = (bytes[1] << 3) | (bytes[0] >> 29);\n\tMD5Transform(buf, in);\n\n\tbyteSwap(buf, 4);\n\tmemcpy(digest, buf, 16);\n\tmemset(this, 0, sizeof(*this));\t/* In case it's sensitive */\n}", "docstring": "/*\n * Final wrapup - pad to 64-byte boundary with the bit pattern \n * 1 0* (64-bit count of bits processed, MSB-first)\n */", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/common/thirdparty/md5.cpp#L100-L130", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FMugShot::Reset", "code": "void FMugShot::Reset()\n{\n\tFaceHealthNow = FaceHealthLast = -1;\n\tbEvilGrin = false;\n\tbNormal = true;\n\tbDamageFaceActive = false;\n\tbOuchActive = false;\n\tCurrentState = NULL;\n\tRampageTimer = 0;\n\tLastDamageAngle = 1;\n}", "docstring": "//===========================================================================\n//\n// FMugShot :: Reset\n//\n//===========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/g_statusbar/sbar_mugshot.cpp#L223-L233", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DBaseStatusBar::DrawBottomStuff", "code": "void DBaseStatusBar::DrawBottomStuff (EHudState state)\n{\n\tprimaryLevel->localEventManager->RenderUnderlay(state);\n\tDrawMessages (HUDMSGLayer_UnderHUD, (state == HUD_StatusBar) ? 
GetTopOfStatusbar() : twod->GetHeight());\n}", "docstring": "//---------------------------------------------------------------------------\n//\n// DrawBottomStuff\n//\n//---------------------------------------------------------------------------", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/g_statusbar/shared_sbar.cpp#L1187-L1191", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FTextureAnimator::UpdateAnimations", "code": "void FTextureAnimator::UpdateAnimations (uint64_t mstime)\n{\n\tfor (unsigned int i = 0; i < mFireTextures.Size(); i++)\n\t{\n\t\tFFireTexture* fire = &mFireTextures[i];\n\t\tbool updated = false;\n\n\t\tif (fire->SwitchTime == 0)\n\t\t{\n\t\t\tfire->SwitchTime = mstime + fire->Duration;\n\t\t}\n\t\telse while (fire->SwitchTime <= mstime)\n\t\t{\n\t\t\tstatic_cast(fire->texture->GetTexture())->Update();\n\t\t\tfire->SwitchTime = mstime + fire->Duration;\n\t\t\tupdated = true;\n\t\t}\n\n\t\tif (updated)\n\t\t{\n\t\t\tfire->texture->CleanHardwareData();\n\n\t\t\tif (fire->texture->GetSoftwareTexture())\n\t\t\t\tdelete fire->texture->GetSoftwareTexture();\n\n\t\t\tfire->texture->SetSoftwareTexture(nullptr);\n\t\t}\n\t}\n\tfor (unsigned int j = 0; j < mAnimations.Size(); ++j)\n\t{\n\t\tFAnimDef *anim = &mAnimations[j];\n\n\t\t// If this is the first time through R_UpdateAnimations, just\n\t\t// initialize the anim's switch time without actually animating.\n\t\tif (anim->SwitchTime == 0)\n\t\t{\n\t\t\tanim->SetSwitchTime (mstime);\n\t\t}\n\t\telse while (anim->SwitchTime <= mstime)\n\t\t{ // Multiple frames may have passed since the last time calling\n\t\t // R_UpdateAnimations, so be sure to loop through them all.\n\n\t\t\tAdvanceFrame(anim->CurFrame, anim->AnimType, *anim);\n\t\t\tanim->SetSwitchTime (mstime);\n\t\t}\n\n\t\tif (anim->bDiscrete)\n\t\t{\n\t\t\tTexMan.SetTranslation (anim->BasePic, anim->Frames[anim->CurFrame].FramePic);\n\t\t}\n\t\telse\n\t\t{\n\t\t\tfor (unsigned int i = 0; i < anim->NumFrames; i++)\n\t\t\t{\n\t\t\t\tTexMan.SetTranslation (anim->BasePic + i, anim->BasePic + (i + anim->CurFrame) % anim->NumFrames);\n\t\t\t}\n\t\t}\n\t}\n}", "docstring": "//==========================================================================\n//\n// FTextureAnimator :: UpdateAnimations\n//\n// Updates texture translations for each animation and scrolls the skies.\n//\n//==========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/gamedata/textures/animations.cpp#L1130-L1188", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DBaseDecal::SetShade", "code": "void DBaseDecal::SetShade (int r, int g, int b)\n{\n\tAlphaColor = MAKEARGB(ColorMatcher.Pick (r, g, b), r, g, b);\n}", "docstring": "//----------------------------------------------------------------------------\n//\n//\n//\n//----------------------------------------------------------------------------", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/playsim/a_decals.cpp#L245-L248", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CheckForPushSpecial", "code": "static void CheckForPushSpecial(line_t *line, int side, AActor *mobj, DVector2 *posforwindowcheck)\n{\n\tif 
(line->special && !(mobj->flags6 & MF6_NOTRIGGER))\n\t{\n\t\tif (posforwindowcheck && !(mobj->Level->i_compatflags2 & COMPATF2_PUSHWINDOW) && line->backsector != NULL)\n\t\t{ // Make sure this line actually blocks us and is not a window\n\t\t\t// or similar construct we are standing inside of.\n\t\t\tDVector3 pos = mobj->PosRelative(line);\n\t\t\tdouble fzt = line->frontsector->ceilingplane.ZatPoint(*posforwindowcheck);\n\t\t\tdouble fzb = line->frontsector->floorplane.ZatPoint(*posforwindowcheck);\n\t\t\tdouble bzt = line->backsector->ceilingplane.ZatPoint(*posforwindowcheck);\n\t\t\tdouble bzb = line->backsector->floorplane.ZatPoint(*posforwindowcheck);\n\t\t\tif (fzt >= mobj->Top() && bzt >= mobj->Top() &&\n\t\t\t\tfzb <= mobj->Z() && bzb <= mobj->Z())\n\t\t\t{\n\t\t\t\tif (line->flags & ML_3DMIDTEX)\n\t\t\t\t{\n\t\t\t\t\tdouble top, bot;\n\t\t\t\t\tP_GetMidTexturePosition(line, side, &top, &bot);\n\t\t\t\t\tif (bot < mobj->Top() && top > mobj->Z())\n\t\t\t\t\t{\n\t\t\t\t\t\tgoto isblocking;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// we must also check if some 3D floor in the backsector may be blocking\n\t\t\t\tfor (auto rover : line->backsector->e->XFloor.ffloors)\n\t\t\t\t{\n\t\t\t\t\tif (!(rover->flags & FF_SOLID) || !(rover->flags & FF_EXISTS)) continue;\n\n\t\t\t\t\tdouble ff_bottom = rover->bottom.plane->ZatPoint(*posforwindowcheck);\n\t\t\t\t\tdouble ff_top = rover->top.plane->ZatPoint(*posforwindowcheck);\n\n\t\t\t\t\tif (ff_bottom < mobj->Top() && ff_top > mobj->Z())\n\t\t\t\t\t{\n\t\t\t\t\t\tgoto isblocking;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\tisblocking:\n\t\tif (mobj->flags2 & MF2_PUSHWALL)\n\t\t{\n\t\t\tP_ActivateLine(line, mobj, side, SPAC_Push);\n\t\t}\n\t\telse if (mobj->flags2 & MF2_IMPACT)\n\t\t{\n\t\t\tif ((mobj->Level->flags2 & LEVEL2_MISSILESACTIVATEIMPACT) ||\n\t\t\t\t!(mobj->flags & MF_MISSILE) ||\n\t\t\t\t(mobj->target == NULL))\n\t\t\t{\n\t\t\t\tP_ActivateLine(line, mobj, side, SPAC_Impact);\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\tP_ActivateLine(line, mobj->target, side, SPAC_Impact);\n\t\t\t}\n\t\t}\n\t}\n}", "docstring": "//===========================================================================\n//\n// CheckForPushSpecial\n//\n//===========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/playsim/p_map.cpp#L2265-L2324", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "P_MoveThing", "code": "bool P_MoveThing(AActor *source, const DVector3 &pos, bool fog)\n{\n\tDVector3 old = source->Pos();\n\n\tsource->SetOrigin (pos, true);\n\tif (P_TestMobjLocation (source))\n\t{\n\t\tif (fog)\n\t\t{\n\t\t\tP_SpawnTeleportFog(source, pos, false, true);\n\t\t\tP_SpawnTeleportFog(source, old, true, true);\n\t\t}\n\t\tsource->ClearInterpolation();\n\t\tsource->renderflags |= RF_NOINTERPOLATEVIEW;\n\t\treturn true;\n\t}\n\telse\n\t{\n\t\tsource->SetOrigin (old, true);\n\t\treturn false;\n\t}\n}", "docstring": "// [BC] Added\n// [RH] Fixed", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/playsim/p_things.cpp#L117-L138", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FPolyObj::CheckMobjBlocking", "code": "bool FPolyObj::CheckMobjBlocking (side_t *sd)\n{\n\tstatic TArray checker;\n\tFBlockNode *block;\n\tAActor *mobj;\n\tint i, j, k;\n\tint 
left, right, top, bottom;\n\tline_t *ld;\n\tbool blocked;\n\tbool performBlockingThrust;\n\tint bmapwidth = Level->blockmap.bmapwidth;\n\tint bmapheight = Level->blockmap.bmapheight;\n\n\tld = sd->linedef;\n\n\ttop = Level->blockmap.GetBlockY(ld->bbox[BOXTOP]);\n\tbottom = Level->blockmap.GetBlockY(ld->bbox[BOXBOTTOM]);\n\tleft = Level->blockmap.GetBlockX(ld->bbox[BOXLEFT]);\n\tright = Level->blockmap.GetBlockX(ld->bbox[BOXRIGHT]);\n\n\tblocked = false;\n\tchecker.Clear();\n\n\tbottom = bottom < 0 ? 0 : bottom;\n\tbottom = bottom >= bmapheight ? bmapheight-1 : bottom;\n\ttop = top < 0 ? 0 : top;\n\ttop = top >= bmapheight ? bmapheight-1 : top;\n\tleft = left < 0 ? 0 : left;\n\tleft = left >= bmapwidth ? bmapwidth-1 : left;\n\tright = right < 0 ? 0 : right;\n\tright = right >= bmapwidth ? bmapwidth-1 : right;\n\n\tfor (j = bottom*bmapwidth; j <= top*bmapwidth; j += bmapwidth)\n\t{\n\t\tfor (i = left; i <= right; i++)\n\t\t{\n\t\t\tfor (block = Level->blockmap.blocklinks[j+i]; block != nullptr; block = block->NextActor)\n\t\t\t{\n\t\t\t\tmobj = block->Me;\n\t\t\t\tfor (k = (int)checker.Size()-1; k >= 0; --k)\n\t\t\t\t{\n\t\t\t\t\tif (checker[k] == mobj)\n\t\t\t\t\t{\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (k < 0)\n\t\t\t\t{\n\t\t\t\t\tchecker.Push (mobj);\n\t\t\t\t\tif ((mobj->flags&MF_SOLID) && !(mobj->flags&MF_NOCLIP))\n\t\t\t\t\t{\n\t\t\t\t\t\tFLineOpening open;\n\t\t\t\t\t\topen.top = LINEOPEN_MAX;\n\t\t\t\t\t\topen.bottom = LINEOPEN_MIN;\n\t\t\t\t\t\t// [TN] Check wether this actor gets blocked by the line.\n\t\t\t\t\t\tif (ld->backsector != nullptr && !P_IsBlockedByLine(mobj, ld) \n\t\t\t\t\t\t\t&& (!(ld->flags & ML_3DMIDTEX) ||\n\t\t\t\t\t\t\t\t(!P_LineOpening_3dMidtex(mobj, ld, open) &&\n\t\t\t\t\t\t\t\t\t(mobj->Top() < open.top)\n\t\t\t\t\t\t\t\t) || (open.abovemidtex && mobj->Z() > mobj->floorz))\n\t\t\t\t\t\t\t)\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\t// [BL] We can't just continue here since we must\n\t\t\t\t\t\t\t// determine if the line's backsector is going to\n\t\t\t\t\t\t\t// be blocked.\n\t\t\t\t\t\t\tperformBlockingThrust = false;\n\t\t\t\t\t\t}\n\t\t\t\t\t\telse\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tperformBlockingThrust = true;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tDVector2 pos = mobj->PosRelative(ld).XY();\n\t\t\t\t\t\tFBoundingBox box(pos.X, pos.Y, mobj->radius);\n\n\t\t\t\t\t\tif (!inRange(box, ld) || BoxOnLineSide(box, ld) != -1)\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tcontinue;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (ld->isLinePortal())\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\t// Fixme: this still needs to figure out if the polyobject move made the player cross the portal line.\n\t\t\t\t\t\t\tif (P_TryMove(mobj, mobj->Pos().XY(), false))\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tcontinue;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t// We have a two-sided linedef so we should only check one side\n\t\t\t\t\t\t// so that the thrust from both sides doesn't cancel each other out.\n\t\t\t\t\t\t// Best use the one facing the player and ignore the back side.\n\t\t\t\t\t\tif (ld->sidedef[1] != nullptr)\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tint side = P_PointOnLineSidePrecise(mobj->Pos(), ld);\n\t\t\t\t\t\t\tif (ld->sidedef[side] != sd)\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tcontinue;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t// [BL] See if we hit below the floor/ceiling of the poly.\n\t\t\t\t\t\t\telse if(!performBlockingThrust && (\n\t\t\t\t\t\t\t\t\tmobj->Z() < ld->sidedef[!side]->sector->GetSecPlane(sector_t::floor).ZatPoint(mobj) ||\n\t\t\t\t\t\t\t\t\tmobj->Top() > 
ld->sidedef[!side]->sector->GetSecPlane(sector_t::ceiling).ZatPoint(mobj)\n\t\t\t\t\t\t\t\t))\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\tperformBlockingThrust = true;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif(performBlockingThrust)\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tThrustMobj (mobj, sd);\n\t\t\t\t\t\t\tblocked = true;\n\t\t\t\t\t\t}\n\t\t\t\t\t\telse\n\t\t\t\t\t\t\tcontinue;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn blocked;\n}", "docstring": "//==========================================================================\n//\n// CheckMobjBlocking\n//\n//==========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/playsim/po_man.cpp#L1028-L1149", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HWDrawInfo::AddLines", "code": "void HWDrawInfo::AddLines(subsector_t * sub, sector_t * sector, FRenderState& state)\n{\n\tcurrentsector = sector;\n\tcurrentsubsector = sub;\n\n\tClipWall.Clock();\n\tif (sub->polys != nullptr)\n\t{\n\t\tAddPolyobjs(sub, state);\n\t}\n\telse\n\t{\n\t\tint count = sub->numlines;\n\t\tseg_t * seg = sub->firstline;\n\n\t\twhile (count--)\n\t\t{\n\t\t\tif (seg->linedef == nullptr)\n\t\t\t{\n\t\t\t\tif (!(sub->flags & SSECMF_DRAWN)) AddLine (seg, mClipPortal != nullptr, state);\n\t\t\t}\n\t\t\telse if (!(seg->sidedef->Flags & WALLF_POLYOBJ)) \n\t\t\t{\n\t\t\t\tAddLine (seg, mClipPortal != nullptr, state);\n\t\t\t}\n\t\t\tseg++;\n\t\t}\n\t}\n\tClipWall.Unclock();\n}", "docstring": "//==========================================================================\n//\n//\n//\n//==========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/rendering/hwrenderer/scene/hw_bsp.cpp#L517-L546", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isLinePortal", "code": "static int isLinePortal(line_t *self)\n {\n\t return self->isLinePortal();\n }", "docstring": "//===========================================================================\n//\n// line_t exports\n//\n//===========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/scripting/vmthunks.cpp#L1215-L1218", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ParseActor", "code": "static void ParseActor(FScanner &sc, PNamespace *ns)\n{\n\tPClassActor *info = NULL;\n\tBaggage bag;\n\n\tbag.Namespace = ns;\n\tbag.Version = { 2, 0, 0 };\t\n\tbag.fromDecorate = true;\n\tbag.ScriptPosition = sc;\n\tinfo = ParseActorHeader(sc, &bag);\n\tsc.MustGetToken('{');\n\twhile (sc.MustGetAnyToken(), sc.TokenType != '}')\n\t{\n\t\tswitch (sc.TokenType)\n\t\t{\n\t\tcase TK_Const:\n\t\t\tParseConstant (sc, &info->VMType->Symbols, info, ns);\n\t\t\tbreak;\n\n\t\tcase TK_Enum:\n\t\t\tParseEnum (sc, &info->VMType->Symbols, info, ns);\n\t\t\tbreak;\n\n\t\tcase TK_Var:\n\t\t\tParseUserVariable (sc, &info->VMType->Symbols, info, ns);\n\t\t\tbreak;\n\n\t\tcase TK_Identifier:\n\t\t\tParseActorProperty(sc, bag);\n\t\t\tbreak;\n\n\t\tcase TK_States:\n\t\t\tParseStates(sc, bag.Info, (AActor *)bag.Info->Defaults, bag);\n\t\t\tbag.StateSet = true;\n\t\t\tbreak;\n\n\t\tcase '+':\n\t\tcase 
'-':\n\t\t\tParseActorFlag(sc, bag, sc.TokenType);\n\t\t\tbreak;\n\n\t\tdefault:\n\t\t\tsc.ScriptError(\"Unexpected '%s' in definition of '%s'\", sc.String, bag.Info->TypeName.GetChars());\n\t\t\tbreak;\n\t\t}\n\t}\n\tif (bag.DropItemSet)\n\t{\n\t\tbag.Info->SetDropItems(bag.DropItemList);\n\t}\n\ttry\n\t{\n\t\tFinalizeClass(info, bag.statedef);\n\t}\n\tcatch (CRecoverableError &err)\n\t{\n\t\tsc.ScriptError(\"%s\", err.GetMessage());\n\t}\n\tsc.SetCMode (false);\n}", "docstring": "//==========================================================================\n//\n// Reads an actor definition\n//\n//==========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/scripting/decorate/thingdef_parse.cpp#L1153-L1212", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "S_PrecacheLevel", "code": "void S_PrecacheLevel(FLevelLocals* Level)\n{\n\tif (GSnd && Level == primaryLevel)\n\t{\n\t\tsoundEngine->MarkAllUnused();\n\n\t\tAActor* actor;\n\t\tauto iterator = Level->GetThinkerIterator();\n\n\t\t// Precache all sounds known to be used by the currently spawned actors.\n\t\twhile ((actor = iterator.Next()) != nullptr)\n\t\t{\n\t\t\tIFVIRTUALPTR(actor, AActor, MarkPrecacheSounds)\n\t\t\t{\n\t\t\t\tVMValue params[1] = { actor };\n\t\t\t\tVMCall(func, params, 1, nullptr, 0);\n\t\t\t}\n\t\t}\n\t\tfor (auto snd : gameinfo.PrecachedSounds)\n\t\t{\n\t\t\tsoundEngine->MarkUsed(snd);\n\t\t}\n\t\t// Precache all extra sounds requested by this map.\n\t\tfor (auto snd : primaryLevel->info->PrecacheSounds)\n\t\t{\n\t\t\tsoundEngine->MarkUsed(snd);\n\t\t}\n\t\tsoundEngine->CacheMarkedSounds();\n\t}\n}", "docstring": "//==========================================================================\n//\n// S_PrecacheLevel\n//\n// Like R_PrecacheLevel, but for sounds.\n//\n//==========================================================================", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/src/sound/s_doomsound.cpp#L328-L357", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FastDecodeTag", "code": "static uint32_t FastDecodeTag(uint16_t coded_tag) {\n uint32_t result = coded_tag;\n result += static_cast(coded_tag);\n return result >> 1;\n}", "docstring": "// On the fast path, a (matching) 2-byte tag always needs to be decoded.", "url": "https://github.com/channeldorg/channeld-ue-plugin/blob/624a5f6c51dbb898fe29a11c6afcd3eb0aaa2408/Source/ProtobufUE/ThirdParty/include/google/protobuf/generated_message_tctable_lite.cc#L150-L154", "sha": "624a5f6c51dbb898fe29a11c6afcd3eb0aaa2408"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ImGuiIO::AddInputCharacterUTF16", "code": "void ImGuiIO::AddInputCharacterUTF16(ImWchar16 c)\n{\n if (c == 0 && InputQueueSurrogate == 0)\n return;\n\n if ((c & 0xFC00) == 0xD800) // High surrogate, must save\n {\n if (InputQueueSurrogate != 0)\n InputQueueCharacters.push_back(IM_UNICODE_CODEPOINT_INVALID);\n InputQueueSurrogate = c;\n return;\n }\n\n ImWchar cp = c;\n if (InputQueueSurrogate != 0)\n {\n if ((c & 0xFC00) != 0xDC00) // Invalid low surrogate\n {\n InputQueueCharacters.push_back(IM_UNICODE_CODEPOINT_INVALID);\n }\n else\n {\n#if IM_UNICODE_CODEPOINT_MAX == 0xFFFF\n cp = IM_UNICODE_CODEPOINT_INVALID; // Codepoint will not fit in 
ImWchar\n#else\n cp = (ImWchar)(((InputQueueSurrogate - 0xD800) << 10) + (c - 0xDC00) + 0x10000);\n#endif\n }\n\n InputQueueSurrogate = 0;\n }\n InputQueueCharacters.push_back(cp);\n}", "docstring": "// UTF16 strings use surrogate pairs to encode codepoints >= 0x10000, so\n// we should save the high surrogate.", "url": "https://github.com/zer0condition/ReverseKit/blob/853adbea5c13dba18cca535c28650821570ea55c/ReverseKit/ImGui/imgui.cpp#L1155-L1187", "sha": "853adbea5c13dba18cca535c28650821570ea55c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BowVector::saveM", "code": "void BowVector::saveM(const std::string &filename, size_t W) const\n{\n std::fstream f(filename.c_str(), std::ios::out);\n \n WordId last = 0;\n BowVector::const_iterator bit;\n for(bit = this->begin(); bit != this->end(); ++bit)\n {\n for(; last < bit->first; ++last)\n {\n f << \"0 \";\n }\n f << bit->second << \" \";\n \n last = bit->first + 1;\n }\n for(; last < (WordId)W; ++last)\n f << \"0 \";\n \n f.close();\n}", "docstring": "// --------------------------------------------------------------------------", "url": "https://github.com/w111liang222/lidar-slam-detection/blob/d57a923b3972d0a0bfdfc0016c32de53c26b9f9f/slam/common/DBoW2/DBoW2/BowVector.cpp#L105-L125", "sha": "d57a923b3972d0a0bfdfc0016c32de53c26b9f9f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ImGuiIO::AddInputCharacterUTF16", "code": "void ImGuiIO::AddInputCharacterUTF16(ImWchar16 c)\n{\n if ((c == 0 && InputQueueSurrogate == 0) || !AppAcceptingEvents)\n return;\n\n if ((c & 0xFC00) == 0xD800) // High surrogate, must save\n {\n if (InputQueueSurrogate != 0)\n AddInputCharacter(IM_UNICODE_CODEPOINT_INVALID);\n InputQueueSurrogate = c;\n return;\n }\n\n ImWchar cp = c;\n if (InputQueueSurrogate != 0)\n {\n if ((c & 0xFC00) != 0xDC00) // Invalid low surrogate\n {\n AddInputCharacter(IM_UNICODE_CODEPOINT_INVALID);\n }\n else\n {\n#if IM_UNICODE_CODEPOINT_MAX == 0xFFFF\n cp = IM_UNICODE_CODEPOINT_INVALID; // Codepoint will not fit in ImWchar\n#else\n cp = (ImWchar)(((InputQueueSurrogate - 0xD800) << 10) + (c - 0xDC00) + 0x10000);\n#endif\n }\n\n InputQueueSurrogate = 0;\n }\n AddInputCharacter((unsigned)cp);\n}", "docstring": "// UTF16 strings use surrogate pairs to encode codepoints >= 0x10000, so\n// we should save the high surrogate.", "url": "https://github.com/bruhmoment21/cs2-sdk/blob/3fdb26b0eba5a7335f011c68bb5c7ef5a3171144/cs2-sdk/libs/imgui/imgui.cpp#L1346-L1378", "sha": "3fdb26b0eba5a7335f011c68bb5c7ef5a3171144"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ImGui::OpenPopupEx", "code": "void ImGui::OpenPopupEx(ImGuiID id, ImGuiPopupFlags popup_flags)\n{\n ImGuiContext& g = *GImGui;\n ImGuiWindow* parent_window = g.CurrentWindow;\n const int current_stack_size = g.BeginPopupStack.Size;\n\n if (popup_flags & ImGuiPopupFlags_NoOpenOverExistingPopup)\n if (IsPopupOpen((ImGuiID)0, ImGuiPopupFlags_AnyPopupId))\n return;\n\n ImGuiPopupData popup_ref; // Tagged as new ref as Window will be set back to NULL if we write this into OpenPopupStack.\n popup_ref.PopupId = id;\n popup_ref.Window = NULL;\n popup_ref.BackupNavWindow = g.NavWindow; // When popup closes focus may be restored to NavWindow (depend on window type).\n popup_ref.OpenFrameCount = g.FrameCount;\n popup_ref.OpenParentId = parent_window->IDStack.back();\n popup_ref.OpenPopupPos = NavCalcPreferredRefPos();\n popup_ref.OpenMousePos = IsMousePosValid(&g.IO.MousePos) ? 
g.IO.MousePos : popup_ref.OpenPopupPos;\n\n IMGUI_DEBUG_LOG_POPUP(\"[popup] OpenPopupEx(0x%08X)\\n\", id);\n if (g.OpenPopupStack.Size < current_stack_size + 1)\n {\n g.OpenPopupStack.push_back(popup_ref);\n }\n else\n {\n // Gently handle the user mistakenly calling OpenPopup() every frame. It is a programming mistake! However, if we were to run the regular code path, the ui\n // would become completely unusable because the popup will always be in hidden-while-calculating-size state _while_ claiming focus. Which would be a very confusing\n // situation for the programmer. Instead, we silently allow the popup to proceed, it will keep reappearing and the programming error will be more obvious to understand.\n if (g.OpenPopupStack[current_stack_size].PopupId == id && g.OpenPopupStack[current_stack_size].OpenFrameCount == g.FrameCount - 1)\n {\n g.OpenPopupStack[current_stack_size].OpenFrameCount = popup_ref.OpenFrameCount;\n }\n else\n {\n // Close child popups if any, then flag popup for open/reopen\n ClosePopupToLevel(current_stack_size, false);\n g.OpenPopupStack.push_back(popup_ref);\n }\n\n // When reopening a popup we first refocus its parent, otherwise if its parent is itself a popup it would get closed by ClosePopupsOverWindow().\n // This is equivalent to what ClosePopupToLevel() does.\n //if (g.OpenPopupStack[current_stack_size].PopupId == id)\n // FocusWindow(parent_window);\n }\n}", "docstring": "// Mark popup as open (toggle toward open state).\n// Popups are closed when user click outside, or activate a pressable item, or CloseCurrentPopup() is called within a BeginPopup()/EndPopup() block.\n// Popup identifiers are relative to the current ID-stack (so OpenPopup and BeginPopup needs to be at the same level).\n// One open popup per level of the popup hierarchy (NB: when assigning we reset the Window member of ImGuiPopupRef to NULL)", "url": "https://github.com/bruhmoment21/cs2-sdk/blob/3fdb26b0eba5a7335f011c68bb5c7ef5a3171144/cs2-sdk/libs/imgui/imgui.cpp#L10544-L10589", "sha": "3fdb26b0eba5a7335f011c68bb5c7ef5a3171144"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Document::isRowHidden", "code": "bool Document::isRowHidden(int row)\n{\n\tif (Worksheet *sheet = currentWorksheet())\n\t return sheet->isRowHidden(row);\n\t return false;\n}", "docstring": "/*!\n Returns true if \\a row is hidden.\n*/", "url": "https://github.com/beixiaocai/DS/blob/25458ea1f0dbabade10969d462f3770f4b3a08a9/3rdparty/QXlsx/source/xlsxdocument.cpp#L941-L946", "sha": "25458ea1f0dbabade10969d462f3770f4b3a08a9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Worksheet::saveToXmlFile", "code": "void Worksheet::saveToXmlFile(QIODevice *device) const\n{\n\tQ_D(const Worksheet);\n\td->relationships->clear();\n\n\tQXmlStreamWriter writer(device);\n\n\twriter.writeStartDocument(QStringLiteral(\"1.0\"), true);\n\twriter.writeStartElement(QStringLiteral(\"worksheet\"));\n\twriter.writeAttribute(QStringLiteral(\"xmlns\"), QStringLiteral(\"http://schemas.openxmlformats.org/spreadsheetml/2006/main\"));\n\twriter.writeAttribute(QStringLiteral(\"xmlns:r\"), QStringLiteral(\"http://schemas.openxmlformats.org/officeDocument/2006/relationships\"));\n\n\t//for Excel 2010\n\t// writer.writeAttribute(\"xmlns:mc\", \"http://schemas.openxmlformats.org/markup-compatibility/2006\");\n\t// writer.writeAttribute(\"xmlns:x14ac\", \"http://schemas.microsoft.com/office/spreadsheetml/2009/9/ac\");\n\t// writer.writeAttribute(\"mc:Ignorable\", 
\"x14ac\");\n\n\twriter.writeStartElement(QStringLiteral(\"dimension\"));\n\twriter.writeAttribute(QStringLiteral(\"ref\"), d->generateDimensionString());\n\twriter.writeEndElement();//dimension\n\n\twriter.writeStartElement(QStringLiteral(\"sheetViews\"));\n\twriter.writeStartElement(QStringLiteral(\"sheetView\"));\n\tif (d->windowProtection)\n\t\twriter.writeAttribute(QStringLiteral(\"windowProtection\"), QStringLiteral(\"1\"));\n\tif (d->showFormulas)\n\t\twriter.writeAttribute(QStringLiteral(\"showFormulas\"), QStringLiteral(\"1\"));\n\tif (!d->showGridLines)\n\t\twriter.writeAttribute(QStringLiteral(\"showGridLines\"), QStringLiteral(\"0\"));\n\tif (!d->showRowColHeaders)\n\t\twriter.writeAttribute(QStringLiteral(\"showRowColHeaders\"), QStringLiteral(\"0\"));\n\tif (!d->showZeros)\n\t\twriter.writeAttribute(QStringLiteral(\"showZeros\"), QStringLiteral(\"0\"));\n\tif (d->rightToLeft)\n\t\twriter.writeAttribute(QStringLiteral(\"rightToLeft\"), QStringLiteral(\"1\"));\n\tif (d->tabSelected)\n\t\twriter.writeAttribute(QStringLiteral(\"tabSelected\"), QStringLiteral(\"1\"));\n\tif (!d->showRuler)\n\t\twriter.writeAttribute(QStringLiteral(\"showRuler\"), QStringLiteral(\"0\"));\n\tif (!d->showOutlineSymbols)\n\t\twriter.writeAttribute(QStringLiteral(\"showOutlineSymbols\"), QStringLiteral(\"0\"));\n\tif (!d->showWhiteSpace)\n\t\twriter.writeAttribute(QStringLiteral(\"showWhiteSpace\"), QStringLiteral(\"0\"));\n\twriter.writeAttribute(QStringLiteral(\"workbookViewId\"), QStringLiteral(\"0\"));\n\twriter.writeEndElement();//sheetView\n\twriter.writeEndElement();//sheetViews\n\n\twriter.writeStartElement(QStringLiteral(\"sheetFormatPr\"));\n\twriter.writeAttribute(QStringLiteral(\"defaultRowHeight\"), QString::number(d->default_row_height));\n\tif (d->default_row_height != 15)\n\t\twriter.writeAttribute(QStringLiteral(\"customHeight\"), QStringLiteral(\"1\"));\n\tif (d->default_row_zeroed)\n\t\twriter.writeAttribute(QStringLiteral(\"zeroHeight\"), QStringLiteral(\"1\"));\n\tif (d->outline_row_level)\n\t\twriter.writeAttribute(QStringLiteral(\"outlineLevelRow\"), QString::number(d->outline_row_level));\n\tif (d->outline_col_level)\n\t\twriter.writeAttribute(QStringLiteral(\"outlineLevelCol\"), QString::number(d->outline_col_level));\n\t//for Excel 2010\n\t// writer.writeAttribute(\"x14ac:dyDescent\", \"0.25\");\n\twriter.writeEndElement();//sheetFormatPr\n\n if (!d->colsInfo.isEmpty())\n {\n\t\twriter.writeStartElement(QStringLiteral(\"cols\"));\n\t\tQMapIterator > it(d->colsInfo);\n while (it.hasNext())\n {\n\t\t\tit.next();\n\t\t\tQSharedPointer col_info = it.value();\n\t\t\twriter.writeStartElement(QStringLiteral(\"col\"));\n\t\t\twriter.writeAttribute(QStringLiteral(\"min\"), QString::number(col_info->firstColumn));\n\t\t\twriter.writeAttribute(QStringLiteral(\"max\"), QString::number(col_info->lastColumn));\n\t\t\tif (col_info->width)\n\t\t\t\twriter.writeAttribute(QStringLiteral(\"width\"), QString::number(col_info->width, 'g', 15));\n\t\t\tif (!col_info->format.isEmpty())\n\t\t\t\twriter.writeAttribute(QStringLiteral(\"style\"), QString::number(col_info->format.xfIndex()));\n\t\t\tif (col_info->hidden)\n\t\t\t\twriter.writeAttribute(QStringLiteral(\"hidden\"), QStringLiteral(\"1\"));\n\t\t\tif (col_info->width)\n\t\t\t\twriter.writeAttribute(QStringLiteral(\"customWidth\"), QStringLiteral(\"1\"));\n\t\t\tif (col_info->outlineLevel)\n\t\t\t\twriter.writeAttribute(QStringLiteral(\"outlineLevel\"), QString::number(col_info->outlineLevel));\n\t\t\tif 
(col_info->collapsed)\n\t\t\t\twriter.writeAttribute(QStringLiteral(\"collapsed\"), QStringLiteral(\"1\"));\n\t\t\twriter.writeEndElement();//col\n\t\t}\n\t\twriter.writeEndElement();//cols\n\t}\n\n\twriter.writeStartElement(QStringLiteral(\"sheetData\"));\n\tif (d->dimension.isValid())\n\t\td->saveXmlSheetData(writer);\n\twriter.writeEndElement();//sheetData\n\n\td->saveXmlMergeCells(writer);\n for (const ConditionalFormatting &cf : d->conditionalFormattingList)\n\t\tcf.saveToXml(writer);\n\td->saveXmlDataValidations(writer);\n\n //{{ liufeijin : write pagesettings add by liufeijin 20181028\n\n // fixed by j2doll [dev18]\n // NOTE: empty element is not problem. but, empty structure of element is not parsed by Excel.\n\n // pageMargins\n if ( false == d->PMleft.isEmpty() &&\n false == d->PMright.isEmpty() &&\n false == d->PMtop.isEmpty() &&\n false == d->PMbotton.isEmpty() &&\n false == d->PMheader.isEmpty() &&\n false == d->PMfooter.isEmpty()\n )\n {\n writer.writeStartElement(QStringLiteral(\"pageMargins\"));\n\n writer.writeAttribute(QStringLiteral(\"left\"), d->PMleft );\n writer.writeAttribute(QStringLiteral(\"right\"), d->PMright );\n writer.writeAttribute(QStringLiteral(\"top\"), d->PMtop );\n writer.writeAttribute(QStringLiteral(\"bottom\"), d->PMbotton );\n writer.writeAttribute(QStringLiteral(\"header\"), d->PMheader );\n writer.writeAttribute(QStringLiteral(\"footer\"), d->PMfooter );\n\n writer.writeEndElement(); // pageMargins\n }\n\n // dev57\n if ( !d->Prid.isEmpty() )\n {\n writer.writeStartElement(QStringLiteral(\"pageSetup\")); // pageSetup\n\n writer.writeAttribute(QStringLiteral(\"r:id\"), d->Prid);\n\n if ( !d->PverticalDpi.isEmpty() )\n {\n writer.writeAttribute(QStringLiteral(\"verticalDpi\"), d->PverticalDpi);\n }\n\n if ( !d->PhorizontalDpi.isEmpty() )\n {\n writer.writeAttribute(QStringLiteral(\"horizontalDpi\"), d->PhorizontalDpi);\n }\n\n if ( !d->PuseFirstPageNumber.isEmpty() )\n {\n writer.writeAttribute(QStringLiteral(\"useFirstPageNumber\"), d->PuseFirstPageNumber);\n }\n\n if ( !d->PfirstPageNumber.isEmpty() )\n {\n writer.writeAttribute(QStringLiteral(\"firstPageNumber\"), d->PfirstPageNumber);\n }\n\n if ( !d->Pscale.isEmpty() )\n {\n writer.writeAttribute(QStringLiteral(\"scale\"), d->Pscale);\n }\n\n if ( !d->PpaperSize.isEmpty() )\n {\n writer.writeAttribute(QStringLiteral(\"paperSize\"), d->PpaperSize);\n }\n\n if ( !d->Porientation.isEmpty() )\n {\n writer.writeAttribute(QStringLiteral(\"orientation\"), d->Porientation);\n }\n\n if(!d->Pcopies.isEmpty())\n {\n writer.writeAttribute(QStringLiteral(\"copies\"), d->Pcopies);\n }\n\n writer.writeEndElement(); // pageSetup\n\n } // if ( !d->Prid.isEmpty() )\n\n // headerFooter\n if( !(d->ModdHeader.isNull()) ||\n !(d->MoodFooter.isNull()) )\n {\n writer.writeStartElement(QStringLiteral(\"headerFooter\")); // headerFooter\n\n if ( !d->MoodalignWithMargins.isEmpty() )\n {\n writer.writeAttribute(QStringLiteral(\"alignWithMargins\"), d->MoodalignWithMargins);\n }\n\n if ( !d->ModdHeader.isNull() )\n {\n writer.writeStartElement(QStringLiteral(\"oddHeader\"));\n writer.writeCharacters(d->ModdHeader);\n writer.writeEndElement(); // oddHeader\n }\n\n if ( !d->MoodFooter.isNull() )\n {\n writer.writeTextElement(QStringLiteral(\"oddFooter\"), d->MoodFooter);\n }\n\n writer.writeEndElement(); // headerFooter\n }\n\n\td->saveXmlHyperlinks(writer);\n\td->saveXmlDrawings(writer);\n\n writer.writeEndElement(); // worksheet\n\twriter.writeEndDocument();\n}", "docstring": "/*!\n * \\internal\n */", "url": 
"https://github.com/beixiaocai/DS/blob/25458ea1f0dbabade10969d462f3770f4b3a08a9/3rdparty/QXlsx/source/xlsxworksheet.cpp#L1308-L1515", "sha": "25458ea1f0dbabade10969d462f3770f4b3a08a9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Mutex::ThreadSafeLazyInit", "code": "void Mutex::ThreadSafeLazyInit() {\n // Dynamic mutexes are initialized in the constructor.\n if (type_ == kStatic) {\n switch (\n ::InterlockedCompareExchange(&critical_section_init_phase_, 1L, 0L)) {\n case 0:\n // If critical_section_init_phase_ was 0 before the exchange, we\n // are the first to test it and need to perform the initialization.\n owner_thread_id_ = 0;\n critical_section_ = new CRITICAL_SECTION;\n ::InitializeCriticalSection(critical_section_);\n // Updates the critical_section_init_phase_ to 2 to signal\n // initialization complete.\n GTEST_CHECK_(::InterlockedCompareExchange(\n &critical_section_init_phase_, 2L, 1L) ==\n 1L);\n break;\n case 1:\n // Somebody else is already initializing the mutex; spin until they\n // are done.\n while (::InterlockedCompareExchange(&critical_section_init_phase_,\n 2L,\n 2L) != 2L) {\n // Possibly yields the rest of the thread's time slice to other\n // threads.\n ::Sleep(0);\n }\n break;\n\n case 2:\n break; // The mutex is already initialized and ready for use.\n\n default:\n GTEST_CHECK_(false)\n << \"Unexpected value of critical_section_init_phase_ \"\n << \"while initializing a static mutex.\";\n }\n }\n}", "docstring": "// Initializes owner_thread_id_ and critical_section_ in static mutexes.", "url": "https://github.com/linClubs/Calibration-Is-All-You-Need/blob/efdad7d51644258a63ab831171576d5330024e40/Thirdparty/ceres-solver-1.14.0/internal/ceres/gmock_gtest_all.cc#L8832-L8870", "sha": "efdad7d51644258a63ab831171576d5330024e40"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ValidateRegex", "code": "bool ValidateRegex(const char* regex) {\n if (regex == NULL) {\n // TODO(wan@google.com): fix the source file location in the\n // assertion failures to match where the regex is used in user\n // code.\n ADD_FAILURE() << \"NULL is not a valid simple regular expression.\";\n return false;\n }\n\n bool is_valid = true;\n\n // True iff ?, *, or + can follow the previous atom.\n bool prev_repeatable = false;\n for (int i = 0; regex[i]; i++) {\n if (regex[i] == '\\\\') { // An escape sequence\n i++;\n if (regex[i] == '\\0') {\n ADD_FAILURE() << FormatRegexSyntaxError(regex, i - 1)\n << \"'\\\\' cannot appear at the end.\";\n return false;\n }\n\n if (!IsValidEscape(regex[i])) {\n ADD_FAILURE() << FormatRegexSyntaxError(regex, i - 1)\n << \"invalid escape sequence \\\"\\\\\" << regex[i] << \"\\\".\";\n is_valid = false;\n }\n prev_repeatable = true;\n } else { // Not an escape sequence.\n const char ch = regex[i];\n\n if (ch == '^' && i > 0) {\n ADD_FAILURE() << FormatRegexSyntaxError(regex, i)\n << \"'^' can only appear at the beginning.\";\n is_valid = false;\n } else if (ch == '$' && regex[i + 1] != '\\0') {\n ADD_FAILURE() << FormatRegexSyntaxError(regex, i)\n << \"'$' can only appear at the end.\";\n is_valid = false;\n } else if (IsInSet(ch, \"()[]{}|\")) {\n ADD_FAILURE() << FormatRegexSyntaxError(regex, i)\n << \"'\" << ch << \"' is unsupported.\";\n is_valid = false;\n } else if (IsRepeat(ch) && !prev_repeatable) {\n ADD_FAILURE() << FormatRegexSyntaxError(regex, i)\n << \"'\" << ch << \"' can only follow a repeatable token.\";\n is_valid = false;\n }\n\n prev_repeatable = !IsInSet(ch, 
\"^$?*+\");\n }\n }\n\n return is_valid;\n}", "docstring": "// Generates non-fatal failures and returns false if regex is invalid;\n// otherwise returns true.", "url": "https://github.com/linClubs/Calibration-Is-All-You-Need/blob/efdad7d51644258a63ab831171576d5330024e40/Thirdparty/ceres-solver-1.14.0/internal/ceres/gmock_gtest_all.cc#L9230-L9284", "sha": "efdad7d51644258a63ab831171576d5330024e40"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Cardinality AtLeast", "code": "GTEST_API_ Cardinality AtLeast(int n) { return Between(n, INT_MAX); }", "docstring": "// Creates a cardinality that allows at least n calls.", "url": "https://github.com/linClubs/Calibration-Is-All-You-Need/blob/efdad7d51644258a63ab831171576d5330024e40/Thirdparty/ceres-solver-1.14.0/internal/ceres/gmock_gtest_all.cc#L10582-L10582", "sha": "efdad7d51644258a63ab831171576d5330024e40"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LogIsVisible", "code": "GTEST_API_ bool LogIsVisible(LogSeverity severity) {\n if (GMOCK_FLAG(verbose) == kInfoVerbosity) {\n // Always show the log if --gmock_verbose=info.\n return true;\n } else if (GMOCK_FLAG(verbose) == kErrorVerbosity) {\n // Always hide it if --gmock_verbose=error.\n return false;\n } else {\n // If --gmock_verbose is neither \"info\" nor \"error\", we treat it\n // as \"warning\" (its default value).\n return severity == kWarning;\n }\n}", "docstring": "// Returns true iff a log with the given severity is visible according\n// to the --gmock_verbose flag.", "url": "https://github.com/linClubs/Calibration-Is-All-You-Need/blob/efdad7d51644258a63ab831171576d5330024e40/Thirdparty/ceres-solver-1.14.0/internal/ceres/gmock_gtest_all.cc#L10704-L10716", "sha": "efdad7d51644258a63ab831171576d5330024e40"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TEST", "code": "TEST(Rotation, UnitQuaternionToAngleAxis) {\n double quaternion[4] = { 1, 0, 0, 0 };\n double axis_angle[3];\n double expected[3] = { 0, 0, 0 };\n QuaternionToAngleAxis(quaternion, axis_angle);\n EXPECT_THAT(axis_angle, IsNearAngleAxis(expected));\n}", "docstring": "// Transforms a unit quaternion to an axis angle.", "url": "https://github.com/linClubs/Calibration-Is-All-You-Need/blob/efdad7d51644258a63ab831171576d5330024e40/Thirdparty/ceres-solver-1.14.0/internal/ceres/rotation_test.cc#L271-L277", "sha": "efdad7d51644258a63ab831171576d5330024e40"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UncertainBackProjection::UncertainBackProjection", "code": "UncertainBackProjection::UncertainBackProjection(\n const sm::kinematics::UncertainVector3 & ray)\n : ray(ray) {\n}", "docstring": "/// \\brief the view origin is set to zero", "url": "https://github.com/linClubs/Calibration-Is-All-You-Need/blob/efdad7d51644258a63ab831171576d5330024e40/kalibr/aslam_cv/aslam_cameras/src/BackProjection.cpp#L34-L37", "sha": "efdad7d51644258a63ab831171576d5330024e40"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RotationExpression::inverse", "code": "RotationExpression RotationExpression::inverse()\n {\n boost::shared_ptr newRoot( new RotationExpressionNodeInverse(_root) );\n return RotationExpression(newRoot);\n }", "docstring": "/// \\brief return the expression that inverts the rotation.", "url": 
"https://github.com/linClubs/Calibration-Is-All-You-Need/blob/efdad7d51644258a63ab831171576d5330024e40/kalibr/aslam_optimizer/aslam_backend_expressions/src/RotationExpression.cpp#L39-L43", "sha": "efdad7d51644258a63ab831171576d5330024e40"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "function_namespace_env", "code": "Function function_namespace_env(){\n Environment ns = Environment::namespace_env( \"stats\" ) ;\n Function fun = ns[\".asSparse\"] ; // accesses a non-exported function\n return fun;\n}", "docstring": "// [[Rcpp::export]]", "url": "https://github.com/deepskydetail/AstroSharp/blob/b632424fa2e9749d008851cf40f49b5fcec490d3/resources/app/R-Portable-Mac/library/Rcpp/unitTests/cpp/Function.cpp#L74-L78", "sha": "b632424fa2e9749d008851cf40f49b5fcec490d3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "runit_SubMatrix", "code": "NumericMatrix runit_SubMatrix( ){\n NumericMatrix xx(4, 5);\n xx(0,0) = 3;\n xx(0,1) = 4;\n xx(0,2) = 5;\n xx(1,_) = xx(0,_);\n xx(_,3) = xx(_,2);\n SubMatrix yy = xx( Range(0,2), Range(0,3) ) ;\n NumericMatrix res = yy ;\n return res;\n}", "docstring": "// [[Rcpp::export]]", "url": "https://github.com/deepskydetail/AstroSharp/blob/b632424fa2e9749d008851cf40f49b5fcec490d3/resources/app/R-Portable-Mac/library/Rcpp/unitTests/cpp/Matrix.cpp#L249-L259", "sha": "b632424fa2e9749d008851cf40f49b5fcec490d3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "list_erase", "code": "List list_erase( List list ){\n list.erase( list.begin() ) ;\n return list ;\n}", "docstring": "// [[Rcpp::export]]", "url": "https://github.com/deepskydetail/AstroSharp/blob/b632424fa2e9749d008851cf40f49b5fcec490d3/resources/app/R-Portable-Mac/library/Rcpp/unitTests/cpp/Vector.cpp#L408-L411", "sha": "b632424fa2e9749d008851cf40f49b5fcec490d3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "vec_print_character", "code": "String vec_print_character(CharacterVector v) {\n std::ostringstream buf;\n buf << v;\n return buf.str();\n}", "docstring": "// [[Rcpp::export]]", "url": "https://github.com/deepskydetail/AstroSharp/blob/b632424fa2e9749d008851cf40f49b5fcec490d3/resources/app/R-Portable-Mac/library/Rcpp/unitTests/cpp/Vector.cpp#L834-L838", "sha": "b632424fa2e9749d008851cf40f49b5fcec490d3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "runit_dt", "code": "List runit_dt( NumericVector xx){\n return List::create(\n _[\"false\"] = dt( xx, 5),\n _[\"true\"]\t = dt( xx, 5, true ));\n}", "docstring": "// [[Rcpp::export]]", "url": "https://github.com/deepskydetail/AstroSharp/blob/b632424fa2e9749d008851cf40f49b5fcec490d3/resources/app/R-Portable-Mac/library/Rcpp/unitTests/cpp/stats.cpp#L83-L87", "sha": "b632424fa2e9749d008851cf40f49b5fcec490d3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "formatXP", "code": "void formatXP(BOOL bUpgrade, WCHAR *pBSection, WCHAR *pCSection, WCHAR *pText) {\n WCHAR pDashedKey[PK_LENGTH + 4 + NULL_TERMINATOR]{};\n INT pSSection = 0;\n\n for (int i = 0; i < wcslen(pCSection); i++)\n pSSection -= pCSection[i] - '0';\n\n while (pSSection < 0)\n pSSection += 7;\n\n CHAR pKey[PK_LENGTH + NULL_TERMINATOR]{};\n DWORD nChannelID = wcstoul(pBSection, nullptr, 10),\n nSequence = wcstoul(pCSection, nullptr, 10);\n\n BOOL bValid = keyXP(pKey, pBINKPreset, nChannelID, nSequence, bUpgrade);\n\n QWORD pRaw[2]{},\n pSignature;\n\n DWORD pChannelID,\n pSequence,\n 
pSerial,\n pHash;\n\n BOOL pUpgrade;\n\n unbase24((BYTE *)pRaw, pKey);\n unpackXP(pRaw, pUpgrade, pChannelID, pSequence, pHash, pSignature);\n\n pSerial = pChannelID * 1'000'000 + pSequence;\n\n for (int i = 0; i < 5; i++)\n wsprintfW(pDashedKey, L\"%s%s%.5S\", pDashedKey, i != 0 ? L\"-\" : L\"\", &pKey[5 * i]);\n\n swprintf(\n pText,\n L\"PRODUCT ID:\\tPPPPP-%03d-%06d%d-23XXX\\r\\n\\r\\nBYTECODE:\\t%016llX %016llX\\r\\nUPGRADE:\\t%s\\r\\nSERIAL:\\t\\t0x%lX (%d)\\r\\nHASH:\\t\\t0x%lX\\r\\nSIGNATURE:\\t0x%llX\\r\\nCURVE POINT:\\t%s\\r\\n\\r\\n\\r\\n%s\\r\\n\",\n pChannelID,\n pSequence,\n pSSection,\n pRaw[1], pRaw[0],\n pUpgrade ? L\"TRUE\" : L\"FALSE\",\n pSerial, pSerial,\n pHash,\n pSignature,\n bValid ? L\"TRUE\" : L\"FALSE\",\n pDashedKey\n );\n}", "docstring": "/* Formats Windows XP key output. */", "url": "https://github.com/Endermanch/XPKeygen/blob/f0df34a8222c38ffce8a02bb107106b3d85af0f9/src/key.cpp#L76-L124", "sha": "f0df34a8222c38ffce8a02bb107106b3d85af0f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CMpegAudEncPropertyPageAdv::SetDirty", "code": "void CMpegAudEncPropertyPageAdv::SetDirty()\n{\n m_bDirty = TRUE;\n if (m_pPageSite)\n m_pPageSite->OnStatusChange(PROPPAGESTATUS_DIRTY);\n}", "docstring": "//\n// SetDirty\n//\n// notifies the property page site of changes", "url": "https://github.com/ArdenButterfield/Maim/blob/7f2e5f87f3e3bd7b06916343f336d94fee5f9b85/lib/lame/dshow/PropPage_adv.cpp#L361-L366", "sha": "7f2e5f87f3e3bd7b06916343f336d94fee5f9b85"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isBusyFifoRead", "code": "static bool isBusyFifoRead() {\n return !(SDHC_PRSSTAT & SDHC_PRSSTAT_BREN);\n}", "docstring": "//------------------------------------------------------------------------------", "url": "https://github.com/xiphonics/picoTracker/blob/fa317182a9fe6642b20184a850a4ae0061263a3f/sources/Externals/SdFat/src/SdCard/SdioTeensy.cpp#L490-L492", "sha": "fa317182a9fe6642b20184a850a4ae0061263a3f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "abs", "code": "ark::bfloat16_t abs(ark::bfloat16_t const& h) { return ark::abs(h); }", "docstring": "///////////////////////////////////////////////////////////////////////////////////////////////////\n//\n// Standard Library operations and definitions\n//\n///////////////////////////////////////////////////////////////////////////////////////////////////", "url": "https://github.com/microsoft/ark/blob/d8bbaebd552f47f01b7f0b1ecde3512142435833/ark/bfloat16.cpp#L133-L133", "sha": "d8bbaebd552f47f01b7f0b1ecde3512142435833"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FastAccelStepperEngine::setDebugLed", "code": "void FastAccelStepperEngine::setDebugLed(uint8_t ledPin) {\n fas_ledPin = ledPin;\n pinMode(fas_ledPin, OUTPUT);\n digitalWrite(fas_ledPin, LOW);\n}", "docstring": "//*************************************************************************************************", "url": "https://github.com/tjfenwick/DIY-Sim-Racing-Active-Pedal/blob/b6dcceeb2a92c99b51f9b90c2f4379765486d204/Arduino/Library/FastAccelStepper/src/FastAccelStepper.cpp#L92-L96", "sha": "b6dcceeb2a92c99b51f9b90c2f4379765486d204"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ScopedFakeTestPartResultReporter::~ScopedFakeTestPartResultReporter", "code": "ScopedFakeTestPartResultReporter::~ScopedFakeTestPartResultReporter() {\n internal::UnitTestImpl* const impl = 
internal::GetUnitTestImpl();\n if (intercept_mode_ == INTERCEPT_ALL_THREADS) {\n impl->SetGlobalTestPartResultReporter(old_reporter_);\n } else {\n impl->SetTestPartResultReporterForCurrentThread(old_reporter_);\n }\n}", "docstring": "// The d'tor restores the test part result reporter used by Google Test\n// before.", "url": "https://github.com/srivatsankrishnan/oss-arch-gym/blob/fab6d1442541b5cdf40daf24e64e63261da2d846/sims/AstraSim/protobuf-3.12.4/third_party/googletest/googletest/fused-src/gtest/gtest-all.cc#L2096-L2103", "sha": "fab6d1442541b5cdf40daf24e64e63261da2d846"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TestEventListeners::SetDefaultResultPrinter", "code": "void TestEventListeners::SetDefaultResultPrinter(TestEventListener* listener) {\n if (default_result_printer_ != listener) {\n // It is an error to pass this method a listener that is already in the\n // list.\n delete Release(default_result_printer_);\n default_result_printer_ = listener;\n if (listener != nullptr) Append(listener);\n }\n}", "docstring": "// Sets the default_result_printer attribute to the provided listener.\n// The listener is also added to the listener list and previous\n// default_result_printer is removed from it and deleted. The listener can\n// also be NULL in which case it will not be added to the list. Does\n// nothing if the previous and the current listener objects are the same.", "url": "https://github.com/srivatsankrishnan/oss-arch-gym/blob/fab6d1442541b5cdf40daf24e64e63261da2d846/sims/AstraSim/protobuf-3.12.4/third_party/googletest/googletest/fused-src/gtest/gtest-all.cc#L5970-L5978", "sha": "fab6d1442541b5cdf40daf24e64e63261da2d846"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CaptureStderr", "code": "void CaptureStderr() {\n CaptureStream(kStdErrFileno, \"stderr\", &g_captured_stderr);\n}", "docstring": "// Starts capturing stderr.", "url": "https://github.com/srivatsankrishnan/oss-arch-gym/blob/fab6d1442541b5cdf40daf24e64e63261da2d846/sims/AstraSim/protobuf-3.12.4/third_party/googletest/googletest/fused-src/gtest/gtest-all.cc#L10800-L10802", "sha": "fab6d1442541b5cdf40daf24e64e63261da2d846"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TEST_F", "code": "TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnUserTypeSuccess) {\n ASSERT_PRED_FORMAT4(PredFormatFunction4,\n Bool(++n1_),\n Bool(++n2_),\n Bool(++n3_),\n Bool(++n4_));\n finished_ = true;\n}", "docstring": "// Tests a successful ASSERT_PRED_FORMAT4 where the\n// predicate-formatter is a function on a user-defined type (Bool).", "url": "https://github.com/srivatsankrishnan/oss-arch-gym/blob/fab6d1442541b5cdf40daf24e64e63261da2d846/sims/AstraSim/protobuf-3.12.4/third_party/googletest/googletest/test/gtest_pred_impl_unittest.cc#L1787-L1794", "sha": "fab6d1442541b5cdf40daf24e64e63261da2d846"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getNonUnitDimension", "code": "FailureOr> getNonUnitDimension(\n RankedTensorType tensorTy) {\n auto shape = tensorTy.getShape();\n\n if (llvm::count_if(shape, [](auto dim) { return dim != 1; }) != 1) {\n return failure();\n }\n\n unsigned nonUnitIndex = std::distance(\n shape.begin(), llvm::find_if(shape, [&](auto dim) { return dim != 1; }));\n\n return std::make_pair(nonUnitIndex, shape[nonUnitIndex]);\n}", "docstring": "// Returns the unique non-unit dimension of a tensor and its rank.\n// Returns failure if the tensor has more than one non-unit 
dimension.", "url": "https://github.com/google/heir/blob/fae5e9552a2d177da41381101d4a6c0f426eb2ee/lib/Transforms/SecretInsertMgmt/SecretInsertMgmtCKKS.cpp#L45-L57", "sha": "fae5e9552a2d177da41381101d4a6c0f426eb2ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DetourUpdateProcessWithDll", "code": "BOOL WINAPI DetourUpdateProcessWithDll(_In_ HANDLE hProcess,\n _In_reads_(nDlls) LPCSTR *rlpDlls,\n _In_ DWORD nDlls)\n{\n // Find the next memory region that contains a mapped PE image.\n //\n BOOL bHas64BitDll = FALSE;\n BOOL bHas32BitExe = FALSE;\n BOOL bIs32BitProcess;\n HMODULE hModule = NULL;\n HMODULE hLast = NULL;\n\n DETOUR_TRACE((\"DetourUpdateProcessWithDll(%p,dlls=%d)\\n\", hProcess, nDlls));\n\n for (;;) {\n IMAGE_NT_HEADERS32 inh;\n\n if ((hLast = EnumerateModulesInProcess(hProcess, hLast, &inh)) == NULL) {\n break;\n }\n\n DETOUR_TRACE((\"%p machine=%04x magic=%04x\\n\",\n hLast, inh.FileHeader.Machine, inh.OptionalHeader.Magic));\n\n if ((inh.FileHeader.Characteristics & IMAGE_FILE_DLL) == 0) {\n hModule = hLast;\n if (inh.OptionalHeader.Magic == IMAGE_NT_OPTIONAL_HDR32_MAGIC\n && inh.FileHeader.Machine != 0) {\n\n bHas32BitExe = TRUE;\n }\n DETOUR_TRACE((\"%p Found EXE\\n\", hLast));\n }\n else {\n if (inh.OptionalHeader.Magic == IMAGE_NT_OPTIONAL_HDR64_MAGIC\n && inh.FileHeader.Machine != 0) {\n\n bHas64BitDll = TRUE;\n }\n }\n }\n\n if (hModule == NULL) {\n SetLastError(ERROR_INVALID_OPERATION);\n return FALSE;\n }\n\n if (!bHas32BitExe) {\n bIs32BitProcess = FALSE;\n }\n else if (!bHas64BitDll) {\n bIs32BitProcess = TRUE;\n }\n else {\n if (!IsWow64Process(hProcess, &bIs32BitProcess)) {\n return FALSE;\n }\n }\n\n DETOUR_TRACE((\" 32BitExe=%d 32BitProcess\\n\", bHas32BitExe, bIs32BitProcess));\n\n return DetourUpdateProcessWithDllEx(hProcess,\n hModule,\n bIs32BitProcess,\n rlpDlls,\n nDlls);\n}", "docstring": "// DETOURS_64BIT\n//////////////////////////////////////////////////////////////////////////////\n//", "url": "https://github.com/oxiKKK/oxware/blob/eb81ecf3fe839b0ca01a19c41ae7e5381d13102d/src/external/detours/creatwth.cpp#L516-L582", "sha": "eb81ecf3fe839b0ca01a19c41ae7e5381d13102d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_appdata_dir", "code": "static std::filesystem::path get_appdata_dir()\n{\n\tPWSTR pwstr_appdata_directory;\n\tHRESULT result = SHGetKnownFolderPath(FOLDERID_RoamingAppData, KF_FLAG_DEFAULT, NULL, &pwstr_appdata_directory);\n\tassert(SUCCEEDED(result));\n\tstd::filesystem::path ret = pwstr_appdata_directory;\n\tCoTaskMemFree(pwstr_appdata_directory);\n\treturn ret / \"oxware\";\n}", "docstring": "// just a little hack over the fact that this code already exists inside the FileSystem code.. since we\n// want to have the console ready right as soon as the application starts, we cannot just use the appdata manager\n// or the filesystem, so yeah.. 
this kinda sucks but whatever, its worth it so that we can log as soon as we start..", "url": "https://github.com/oxiKKK/oxware/blob/eb81ecf3fe839b0ca01a19c41ae7e5381d13102d/src/public/DeveloperConsole.cpp#L488-L496", "sha": "eb81ecf3fe839b0ca01a19c41ae7e5381d13102d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SortList.getMid", "code": "public ListNode getMid(ListNode head) {\n ListNode slow = head, fast = head.next;\n while (fast != null && fast.next != null) {\n fast = fast.next.next;\n slow = slow.next;\n }\n return slow;\n }", "docstring": "/**\n * Find middle with usage slow and fast pointers\n */", "url": "https://github.com/Top4IkRu/java-interview/blob/a26c8fdc50157108e12cc1239195b89234e4bfb4/algorithms/src/main/java/solution/linked_list/medium/SortList.java#L35-L42", "sha": "a26c8fdc50157108e12cc1239195b89234e4bfb4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SystemController.index", "code": "@GetMapping(value = \"/index\")\n public String index(HttpServletRequest httpRequest, HttpSession session, Model model) {\n return \"system/index\";\n }", "docstring": "/**\n * 跳转到浪海博客后台管理首页\n *\n * @return 后台管理首页页面 system/index.html\n */", "url": "https://github.com/Allenkuzma/langhaiblogs/blob/a77bd2103800a65f11812b04afb506aeea9c9eae/src/main/java/cc/langhai/controller/system/SystemController.java#L36-L39", "sha": "a77bd2103800a65f11812b04afb506aeea9c9eae"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SettingManager.getSettingList", "code": "public ArrayList getSettingList() {\n return settingList;\n }", "docstring": "/**\n * @return Returns an Arraylist of all settings\n */", "url": "https://github.com/cloudclientdev/cloudclient/blob/4e2752fdcb45f6f45468aee1eb247c1da41bcd8b/1.8.9/cloudclient/src/main/java/dev/cloudmc/feature/setting/SettingManager.java#L33-L35", "sha": "4e2752fdcb45f6f45468aee1eb247c1da41bcd8b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BurlapOutput.writeBytes", "code": "public void writeBytes(byte []buffer, int offset, int length)\n throws IOException\n {\n if (buffer == null) {\n print(\"\");\n }\n else {\n print(\"\");\n\n int i = 0;\n for (; i + 2 < length; i += 3) {\n if (i != 0 && (i & 0x3f) == 0)\n print('\\n');\n\n int v = (((buffer[offset + i] & 0xff) << 16) +\n ((buffer[offset + i + 1] & 0xff) << 8) + \n (buffer[offset + i + 2] & 0xff));\n\n print(encode(v >> 18));\n print(encode(v >> 12));\n print(encode(v >> 6));\n print(encode(v));\n }\n\n if (i + 1 < length) {\n int v = (((buffer[offset + i] & 0xff) << 8) +\n (buffer[offset + i + 1] & 0xff));\n\n print(encode(v >> 10));\n print(encode(v >> 4));\n print(encode(v << 2));\n print('=');\n }\n else if (i < length) {\n int v = buffer[offset + i] & 0xff;\n\n print(encode(v >> 2));\n print(encode(v << 4));\n print('=');\n print('=');\n }\n \n print(\"\");\n }\n }", "docstring": "/**\n * Writes a byte array to the stream.\n * The array will be written with the following syntax:\n *\n *
\n   * <base64>bytes</base64>\n   * 
\n *\n * If the value is null, it will be written as\n *\n *
\n   * <null></null>\n   * 
\n *\n * @param value the string value to write.\n */", "url": "https://github.com/vivo/MoonBox/blob/5a462f16a21eb8515a3b2387a355e34c5fabd9de/moonbox-common/moonbox-hessian-lite/src/main/java/com/caucho/burlap/io/BurlapOutput.java#L610-L654", "sha": "5a462f16a21eb8515a3b2387a355e34c5fabd9de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Hessian2Output.reset", "code": "public void reset()\n {\n if (_refs != null) {\n _refs.clear();\n _refCount = 0;\n }\n\n _classRefs.clear();\n _typeRefs = null;\n _offset = 0;\n _isPacket = false;\n _isUnshared = false;\n }", "docstring": "/**\n * Resets all counters and references\n */", "url": "https://github.com/vivo/MoonBox/blob/5a462f16a21eb8515a3b2387a355e34c5fabd9de/moonbox-common/moonbox-hessian-lite/src/main/java/com/caucho/hessian/io/Hessian2Output.java#L1688-L1700", "sha": "5a462f16a21eb8515a3b2387a355e34c5fabd9de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HessianOutput.writeInt", "code": "public void writeInt(int value)\n throws IOException\n {\n os.write('I');\n os.write(value >> 24);\n os.write(value >> 16);\n os.write(value >> 8);\n os.write(value);\n }", "docstring": "/**\n * Writes an integer value to the stream. The integer will be written\n * with the following syntax:\n *\n *
\n   * I b32 b24 b16 b8\n   * 
\n *\n * @param value the integer value to write.\n */", "url": "https://github.com/vivo/MoonBox/blob/5a462f16a21eb8515a3b2387a355e34c5fabd9de/moonbox-common/moonbox-hessian-lite/src/main/java/com/caucho/hessian/io/HessianOutput.java#L442-L450", "sha": "5a462f16a21eb8515a3b2387a355e34c5fabd9de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Hash.getAlgorithm", "code": "public byte getAlgorithm() {\n return algorithm;\n }", "docstring": "/**\n * Get Algorithm tag.\n *\n * @return algorithm - The algorithm tag of crypto operation.\n */", "url": "https://github.com/vitorpamplona/amethyst/blob/f0116ad06eac2210fe9dd4ced035f78c969ad504/quartz/src/main/java/com/vitorpamplona/quartz/nip03Timestamp/ots/Hash.java#L65-L67", "sha": "f0116ad06eac2210fe9dd4ced035f78c969ad504"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Room.Builder.clearLiveId", "code": "public Builder clearLiveId() {\n bitField0_ = (bitField0_ & ~0x00000080);\n liveId_ = 0L;\n onChanged();\n return this;\n }", "docstring": "/**\n * int64 live_id = 14;\n * @return This builder for chaining.\n */", "url": "https://github.com/scx567888/live-room-watcher/blob/eda31ba1f69d13e132f428e797ca0479d47afff7/src/main/java/cool/scx/live_room_watcher/impl/douyin_hack/proto_entity/webcast/data/Room.java#L990-L995", "sha": "eda31ba1f69d13e132f428e797ca0479d47afff7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GiftMessage.Builder.getGiftOrBuilder", "code": "public cool.scx.live_room_watcher.impl.douyin_hack.proto_entity.webcast.data.GiftStructOrBuilder getGiftOrBuilder() {\n if (giftBuilder_ != null) {\n return giftBuilder_.getMessageOrBuilder();\n } else {\n return gift_ == null ?\n cool.scx.live_room_watcher.impl.douyin_hack.proto_entity.webcast.data.GiftStruct.getDefaultInstance() : gift_;\n }\n }", "docstring": "/**\n * .GiftStruct gift = 15;\n */", "url": "https://github.com/scx567888/live-room-watcher/blob/eda31ba1f69d13e132f428e797ca0479d47afff7/src/main/java/cool/scx/live_room_watcher/impl/douyin_hack/proto_entity/webcast/im/GiftMessage.java#L1164-L1171", "sha": "eda31ba1f69d13e132f428e797ca0479d47afff7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Response.Builder.addAllMessages", "code": "public Builder addAllMessages(\n java.lang.Iterable values) {\n if (messagesBuilder_ == null) {\n ensureMessagesIsMutable();\n com.google.protobuf.AbstractMessageLite.Builder.addAll(\n values, messages_);\n onChanged();\n } else {\n messagesBuilder_.addAllMessages(values);\n }\n return this;\n }", "docstring": "/**\n * repeated .Message messages = 1;\n */", "url": "https://github.com/scx567888/live-room-watcher/blob/eda31ba1f69d13e132f428e797ca0479d47afff7/src/main/java/cool/scx/live_room_watcher/impl/douyin_hack/proto_entity/webcast/im/Response.java#L1103-L1114", "sha": "eda31ba1f69d13e132f428e797ca0479d47afff7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RoomRankMessage.RoomRank.hasUser", "code": "@java.lang.Override\n public boolean hasUser() {\n return ((bitField0_ & 0x00000001) != 0);\n }", "docstring": "/**\n * .User user = 1;\n * @return Whether the user field is set.\n */", "url": "https://github.com/scx567888/live-room-watcher/blob/eda31ba1f69d13e132f428e797ca0479d47afff7/src/main/java/cool/scx/live_room_watcher/impl/douyin_hack/proto_entity/webcast/im/RoomRankMessage.java#L132-L135", "sha": "eda31ba1f69d13e132f428e797ca0479d47afff7"} +{"repo_name": "", 
"dataset": "github_2023", "owner": "", "lang": "", "func_name": "RoomRankMessage.Builder.getScoreStrBytes", "code": "public com.google.protobuf.ByteString\n getScoreStrBytes() {\n java.lang.Object ref = scoreStr_;\n if (ref instanceof String) {\n com.google.protobuf.ByteString b = \n com.google.protobuf.ByteString.copyFromUtf8(\n (java.lang.String) ref);\n scoreStr_ = b;\n return b;\n } else {\n return (com.google.protobuf.ByteString) ref;\n }\n }", "docstring": "/**\n * string score_str = 2;\n * @return The bytes for scoreStr.\n */", "url": "https://github.com/scx567888/live-room-watcher/blob/eda31ba1f69d13e132f428e797ca0479d47afff7/src/main/java/cool/scx/live_room_watcher/impl/douyin_hack/proto_entity/webcast/im/RoomRankMessage.java#L707-L719", "sha": "eda31ba1f69d13e132f428e797ca0479d47afff7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GiftStruct.getName", "code": "@java.lang.Override\n public java.lang.String getName() {\n java.lang.Object ref = name_;\n if (ref instanceof java.lang.String) {\n return (java.lang.String) ref;\n } else {\n com.google.protobuf.ByteString bs = \n (com.google.protobuf.ByteString) ref;\n java.lang.String s = bs.toStringUtf8();\n name_ = s;\n return s;\n }\n }", "docstring": "/**\n * string name = 16;\n * @return The name.\n */", "url": "https://github.com/scx567888/live-room-watcher/blob/eda31ba1f69d13e132f428e797ca0479d47afff7/src/main/java/cool/scx/live_room_watcher/impl/tiktok_hack/proto_entity/webcast/data/GiftStruct.java#L57-L69", "sha": "eda31ba1f69d13e132f428e797ca0479d47afff7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ChatMessage.getEmotesList", "code": "@java.lang.Override\n public cool.scx.live_room_watcher.impl.tiktok_hack.proto_entity.webcast.im.ChatMessage.EmoteWithIndex getEmotesList(int index) {\n return emotesList_.get(index);\n }", "docstring": "/**\n * repeated .tiktok_hack.ChatMessage.EmoteWithIndex emotesList = 13;\n */", "url": "https://github.com/scx567888/live-room-watcher/blob/eda31ba1f69d13e132f428e797ca0479d47afff7/src/main/java/cool/scx/live_room_watcher/impl/tiktok_hack/proto_entity/webcast/im/ChatMessage.java#L723-L726", "sha": "eda31ba1f69d13e132f428e797ca0479d47afff7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Common.getDependId", "code": "@java.lang.Override\n public cool.scx.live_room_watcher.impl.tiktok_hack.proto_entity.webcast.data.LiveMessageID getDependId() {\n return dependId_ == null ? 
cool.scx.live_room_watcher.impl.tiktok_hack.proto_entity.webcast.data.LiveMessageID.getDefaultInstance() : dependId_;\n }", "docstring": "/**\n * .tiktok_hack.LiveMessageID dependId = 20;\n * @return The dependId.\n */", "url": "https://github.com/scx567888/live-room-watcher/blob/eda31ba1f69d13e132f428e797ca0479d47afff7/src/main/java/cool/scx/live_room_watcher/impl/tiktok_hack/proto_entity/webcast/im/Common.java#L542-L545", "sha": "eda31ba1f69d13e132f428e797ca0479d47afff7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Common.Builder.setRoomMessageHeatLevel", "code": "public Builder setRoomMessageHeatLevel(\n java.lang.String value) {\n if (value == null) { throw new NullPointerException(); }\n roomMessageHeatLevel_ = value;\n bitField0_ |= 0x00200000;\n onChanged();\n return this;\n }", "docstring": "/**\n * string roomMessageHeatLevel = 22;\n * @param value The roomMessageHeatLevel to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/scx567888/live-room-watcher/blob/eda31ba1f69d13e132f428e797ca0479d47afff7/src/main/java/cool/scx/live_room_watcher/impl/tiktok_hack/proto_entity/webcast/im/Common.java#L3261-L3268", "sha": "eda31ba1f69d13e132f428e797ca0479d47afff7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LikeMessage.getEffectCnt", "code": "@java.lang.Override\n public java.lang.String getEffectCnt() {\n java.lang.Object ref = effectCnt_;\n if (ref instanceof java.lang.String) {\n return (java.lang.String) ref;\n } else {\n com.google.protobuf.ByteString bs = \n (com.google.protobuf.ByteString) ref;\n java.lang.String s = bs.toStringUtf8();\n effectCnt_ = s;\n return s;\n }\n }", "docstring": "/**\n * string effectCnt = 9;\n * @return The effectCnt.\n */", "url": "https://github.com/scx567888/live-room-watcher/blob/eda31ba1f69d13e132f428e797ca0479d47afff7/src/main/java/cool/scx/live_room_watcher/impl/tiktok_hack/proto_entity/webcast/im/LikeMessage.java#L310-L322", "sha": "eda31ba1f69d13e132f428e797ca0479d47afff7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LikeMessage.getPublicAreaMessageCommonOrBuilder", "code": "@java.lang.Override\n public cool.scx.live_room_watcher.impl.tiktok_hack.proto_entity.webcast.im.PublicAreaMessageCommonOrBuilder getPublicAreaMessageCommonOrBuilder() {\n return publicAreaMessageCommon_ == null ? 
cool.scx.live_room_watcher.impl.tiktok_hack.proto_entity.webcast.im.PublicAreaMessageCommon.getDefaultInstance() : publicAreaMessageCommon_;\n }", "docstring": "/**\n * .tiktok_hack.PublicAreaMessageCommon publicAreaMessageCommon = 11;\n */", "url": "https://github.com/scx567888/live-room-watcher/blob/eda31ba1f69d13e132f428e797ca0479d47afff7/src/main/java/cool/scx/live_room_watcher/impl/tiktok_hack/proto_entity/webcast/im/LikeMessage.java#L404-L407", "sha": "eda31ba1f69d13e132f428e797ca0479d47afff7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Response.Builder.getIsFirstBytes", "code": "public com.google.protobuf.ByteString\n getIsFirstBytes() {\n java.lang.Object ref = isFirst_;\n if (ref instanceof String) {\n com.google.protobuf.ByteString b = \n com.google.protobuf.ByteString.copyFromUtf8(\n (java.lang.String) ref);\n isFirst_ = b;\n return b;\n } else {\n return (com.google.protobuf.ByteString) ref;\n }\n }", "docstring": "/**\n * string isFirst = 11;\n * @return The bytes for isFirst.\n */", "url": "https://github.com/scx567888/live-room-watcher/blob/eda31ba1f69d13e132f428e797ca0479d47afff7/src/main/java/cool/scx/live_room_watcher/impl/tiktok_hack/proto_entity/webcast/im/Response.java#L1882-L1894", "sha": "eda31ba1f69d13e132f428e797ca0479d47afff7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SysRoleController.getInfo", "code": "@PreAuthorize(\"@ss.hasPermi('system:role:query')\")\n @GetMapping(value = \"/{roleId}\")\n public AjaxResult getInfo(@PathVariable Long roleId)\n {\n roleService.checkRoleDataScope(roleId);\n return AjaxResult.success(roleService.selectRoleById(roleId));\n }", "docstring": "/**\n * 根据角色编号获取详细信息\n */", "url": "https://github.com/zccbbg/ruoyi-mall/blob/aef63aa6fd830fb92b01189b3ea5b6ee49662ef8/ruoyi-admin/src/main/java/com/ruoyi/web/controller/system/SysRoleController.java#L75-L81", "sha": "aef63aa6fd830fb92b01189b3ea5b6ee49662ef8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GenTableServiceImpl.validateEdit", "code": "@Override\n public void validateEdit(GenTable genTable) {\n if (GenConstants.TPL_TREE.equals(genTable.getTplCategory())) {\n String options = JSON.toJSONString(genTable.getParams());\n JSONObject paramsObj = JSONObject.parseObject(options);\n if (StringUtils.isEmpty(paramsObj.getString(GenConstants.TREE_CODE))) {\n throw new ServiceException(\"树编码字段不能为空\");\n } else if (StringUtils.isEmpty(paramsObj.getString(GenConstants.TREE_PARENT_CODE))) {\n throw new ServiceException(\"树父编码字段不能为空\");\n } else if (StringUtils.isEmpty(paramsObj.getString(GenConstants.TREE_NAME))) {\n throw new ServiceException(\"树名称字段不能为空\");\n } else if (GenConstants.TPL_SUB.equals(genTable.getTplCategory())) {\n if (StringUtils.isEmpty(genTable.getSubTableName())) {\n throw new ServiceException(\"关联子表的表名不能为空\");\n } else if (StringUtils.isEmpty(genTable.getSubTableFkName())) {\n throw new ServiceException(\"子表关联的外键名不能为空\");\n }\n }\n }\n }", "docstring": "/**\n * 修改保存参数校验\n *\n * @param genTable 业务信息\n */", "url": "https://github.com/zccbbg/ruoyi-mall/blob/aef63aa6fd830fb92b01189b3ea5b6ee49662ef8/ruoyi-generator/src/main/java/com/ruoyi/generator/service/GenTableServiceImpl.java#L427-L446", "sha": "aef63aa6fd830fb92b01189b3ea5b6ee49662ef8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Env.sanitizeFunction", "code": "private static CelFunctionDecl sanitizeFunction(CelFunctionDecl func) {\n boolean needsSanitizing = 
false;\n for (CelOverloadDecl o : func.overloads()) {\n if (isWellKnownType(o.resultType())) {\n needsSanitizing = true;\n break;\n }\n for (CelType p : o.parameterTypes()) {\n if (isWellKnownType(p)) {\n needsSanitizing = true;\n break;\n }\n }\n }\n if (!needsSanitizing) {\n return func;\n }\n\n CelFunctionDecl.Builder funcBuilder = func.toBuilder();\n ImmutableSet.Builder overloadsBuilder = new ImmutableSet.Builder<>();\n for (CelOverloadDecl overloadDecl : funcBuilder.overloads()) {\n CelOverloadDecl.Builder overloadBuilder = overloadDecl.toBuilder();\n CelType resultType = overloadBuilder.build().resultType();\n if (isWellKnownType(resultType)) {\n overloadBuilder.setResultType(getWellKnownType(resultType));\n }\n\n ImmutableList.Builder parameterTypeBuilder = ImmutableList.builder();\n for (CelType paramType : overloadBuilder.parameterTypes()) {\n if (isWellKnownType(paramType)) {\n parameterTypeBuilder.add(getWellKnownType(paramType));\n } else {\n parameterTypeBuilder.add(paramType);\n }\n }\n overloadBuilder.setParameterTypes(parameterTypeBuilder.build());\n overloadsBuilder.add(overloadBuilder.build());\n }\n return funcBuilder.setOverloads(overloadsBuilder.build()).build();\n }", "docstring": "/**\n * Sanitize the function declaration type making sure that proto-based message names appearing in\n * the result or parameter types are mapped to the appropriate CEL types.\n */", "url": "https://github.com/google/cel-java/blob/3819aef4d7d5e09ec59bcdfe3ff39b9bd70523e9/checker/src/main/java/dev/cel/checker/Env.java#L943-L982", "sha": "3819aef4d7d5e09ec59bcdfe3ff39b9bd70523e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Exercise2.eval", "code": "@SuppressWarnings(\"DoNotCallSuggester\")\n Object eval(CelAbstractSyntaxTree ast, Map parameterValues) {\n throw new UnsupportedOperationException(\"To be implemented\");\n }", "docstring": "/**\n * Evaluates the compiled AST with the user provided parameter values.\n *\n * @throws IllegalArgumentException If the compiled expression in AST fails to evaluate.\n */", "url": "https://github.com/google/cel-java/blob/3819aef4d7d5e09ec59bcdfe3ff39b9bd70523e9/codelab/src/main/codelab/Exercise2.java#L48-L51", "sha": "3819aef4d7d5e09ec59bcdfe3ff39b9bd70523e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CelSource.getLocationOffsetImpl", "code": "private static Optional getLocationOffsetImpl(\n List lineOffsets, int line, int column) {\n if (line <= 0 || column < 0) {\n return Optional.empty();\n }\n int offset = CelSourceHelper.findLineOffset(lineOffsets, line);\n if (offset == -1) {\n return Optional.empty();\n }\n return Optional.of(offset + column);\n }", "docstring": "/**\n * Get the code point offset within the source expression text that corresponds with the {@code\n * line} and {@code column}.\n *\n * @param line the line number starting from 1\n * @param column the column number starting from 0\n */", "url": "https://github.com/google/cel-java/blob/3819aef4d7d5e09ec59bcdfe3ff39b9bd70523e9/common/src/main/java/dev/cel/common/CelSource.java#L123-L133", "sha": "3819aef4d7d5e09ec59bcdfe3ff39b9bd70523e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CelComprehension.Builder.getEntries", "code": "public ImmutableList getEntries() {\n return ImmutableList.copyOf(mutableEntries);\n }", "docstring": "/** Returns an immutable copy of the current mutable entries present in the builder. 
*/", "url": "https://github.com/google/cel-java/blob/3819aef4d7d5e09ec59bcdfe3ff39b9bd70523e9/common/src/main/java/dev/cel/common/ast/CelExpr.java#L800-L802", "sha": "3819aef4d7d5e09ec59bcdfe3ff39b9bd70523e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CelExprFactory.fold", "code": "public final CelExpr fold(\n String iterVar,\n CelExpr.Builder iterRange,\n String accuVar,\n CelExpr accuInit,\n CelExpr.Builder condition,\n CelExpr step,\n CelExpr result) {\n return fold(iterVar, iterRange.build(), accuVar, accuInit, condition.build(), step, result);\n }", "docstring": "/** Fold creates a fold comprehension instruction. */", "url": "https://github.com/google/cel-java/blob/3819aef4d7d5e09ec59bcdfe3ff39b9bd70523e9/common/src/main/java/dev/cel/common/ast/CelExprFactory.java#L245-L254", "sha": "3819aef4d7d5e09ec59bcdfe3ff39b9bd70523e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DemoDialogController.frame1", "code": "@GetMapping(\"/frame1\")\n public String frame1()\n {\n return prefix + \"/table/frame1\";\n }", "docstring": "/**\n * 多层窗口frame1\n */", "url": "https://github.com/zhugezifang/dataService/blob/a25f1c85f05d39b693e44f0981b791c09481a784/admin/src/main/java/com/vince/xq/web/controller/demo/controller/DemoDialogController.java#L84-L88", "sha": "a25f1c85f05d39b693e44f0981b791c09481a784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RuoYiConfig.getUploadPath", "code": "public static String getUploadPath()\n {\n return getProfile() + \"/upload\";\n }", "docstring": "/**\n * 获取上传路径\n */", "url": "https://github.com/zhugezifang/dataService/blob/a25f1c85f05d39b693e44f0981b791c09481a784/common/src/main/java/com/vince/xq/common/config/RuoYiConfig.java#L120-L123", "sha": "a25f1c85f05d39b693e44f0981b791c09481a784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IpUtils.getIpAddr", "code": "public static String getIpAddr(HttpServletRequest request)\n {\n if (request == null)\n {\n return \"unknown\";\n }\n String ip = request.getHeader(\"x-forwarded-for\");\n if (ip == null || ip.length() == 0 || \"unknown\".equalsIgnoreCase(ip))\n {\n ip = request.getHeader(\"Proxy-Client-IP\");\n }\n if (ip == null || ip.length() == 0 || \"unknown\".equalsIgnoreCase(ip))\n {\n ip = request.getHeader(\"X-Forwarded-For\");\n }\n if (ip == null || ip.length() == 0 || \"unknown\".equalsIgnoreCase(ip))\n {\n ip = request.getHeader(\"WL-Proxy-Client-IP\");\n }\n if (ip == null || ip.length() == 0 || \"unknown\".equalsIgnoreCase(ip))\n {\n ip = request.getHeader(\"X-Real-IP\");\n }\n\n if (ip == null || ip.length() == 0 || \"unknown\".equalsIgnoreCase(ip))\n {\n ip = request.getRemoteAddr();\n }\n\n return \"0:0:0:0:0:0:0:1\".equals(ip) ? 
\"127.0.0.1\" : getMultistageReverseProxyIp(ip);\n }", "docstring": "/**\n * 获取客户端IP\n * \n * @param request 请求对象\n * @return IP地址\n */", "url": "https://github.com/zhugezifang/dataService/blob/a25f1c85f05d39b693e44f0981b791c09481a784/common/src/main/java/com/vince/xq/common/utils/IpUtils.java#L20-L50", "sha": "a25f1c85f05d39b693e44f0981b791c09481a784"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LispLexer.ZZ_CMAP", "code": "public static int ZZ_CMAP(int ch) {\n return ZZ_CMAP_A[ZZ_CMAP_Y[ZZ_CMAP_Z[ch>>13]|((ch>>7)&0x3f)]|(ch&0x7f)];\n }", "docstring": "/** \n * Translates characters to character classes\n * Chosen bits are [8, 6, 7]\n * Total runtime size is 1040 bytes\n */", "url": "https://github.com/Enerccio/SLT/blob/177139b3f9d81aa108641340679b65af78073d93/src/main/gen/com/en_circle/slt/plugin/lisp/LispLexer.java#L62-L64", "sha": "177139b3f9d81aa108641340679b65af78073d93"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Keyboard.populateKeyLookupTables", "code": "public static void populateKeyLookupTables() {\n for (int key = 0; key <= 255; key++) {\n getKeyName(key);\n }\n }", "docstring": "/** Populates the key name->index lookup table with the current keyboard layout based names. */", "url": "https://github.com/GTNewHorizons/lwjgl3ify/blob/6df8300fd129ba3d1bd4c75115f79cdd43b14d69/src/main/java/org/lwjglx/input/Keyboard.java#L223-L227", "sha": "6df8300fd129ba3d1bd4c75115f79cdd43b14d69"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WaveData.create", "code": "public static WaveData create(ByteBuffer buffer) {\n try {\n byte[] bytes = null;\n\n if (buffer.hasArray()) {\n bytes = buffer.array();\n } else {\n bytes = new byte[buffer.capacity()];\n buffer.get(bytes);\n }\n return create(bytes);\n } catch (Exception e) {\n org.lwjglx.LWJGLUtil.log(\"Unable to create from ByteBuffer, \" + e.getMessage());\n return null;\n }\n }", "docstring": "/**\n * Creates a WaveData container from the specified ByetBuffer. 
If the buffer is backed by an array, it will be used\n * directly, else the contents of the buffer will be copied using get(byte[]).\n *\n * @param buffer ByteBuffer containing sound file\n * @return WaveData containing data, or null if a failure occured\n */", "url": "https://github.com/GTNewHorizons/lwjgl3ify/blob/6df8300fd129ba3d1bd4c75115f79cdd43b14d69/src/main/java/org/lwjglx/util/WaveData.java#L143-L158", "sha": "6df8300fd129ba3d1bd4c75115f79cdd43b14d69"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Matrix4f.invert", "code": "public Matrix invert() {\n return invert(this, this);\n }", "docstring": "/**\n * Invert this matrix\n * \n * @return this if successful, null otherwise\n */", "url": "https://github.com/GTNewHorizons/lwjgl3ify/blob/6df8300fd129ba3d1bd4c75115f79cdd43b14d69/src/main/java/org/lwjglx/util/vector/Matrix4f.java#L747-L749", "sha": "6df8300fd129ba3d1bd4c75115f79cdd43b14d69"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FileUtils.viewImage", "code": "public void viewImage(@NonNull Uri imageUri) {\n Intent intent = new Intent();\n intent.setAction(Intent.ACTION_VIEW);\n intent.setDataAndType(imageUri, \"image/*\");\n intent.setFlags(FLAG_GRANT_READ_URI_PERMISSION);\n try {\n getContext().startActivity(intent);\n } catch (ActivityNotFoundException e) {\n e.printStackTrace();\n }\n }", "docstring": "/**\n * Instantiates view intent to view an image in default image viewer\n *\n * @param imageUri The image {@link URI}, which is to be viewed\n */", "url": "https://github.com/AlfaazPlus/QuranApp/blob/cfd95575d4ba66a5cf2313f13d03293d679d5d09/app/src/main/java/com/quranapp/android/utils/univ/FileUtils.java#L225-L235", "sha": "cfd95575d4ba66a5cf2313f13d03293d679d5d09"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ManyToManyAssembleOperationHandler.getSourcesFromContainer", "code": "@SuppressWarnings(\"unchecked\")\n @Override\n protected Map getSourcesFromContainer(Container container, Collection targets) {\n Set keys = targets.stream()\n .map(Target::getKey)\n .map(k -> (Collection)k)\n .flatMap(Collection::stream)\n .collect(Collectors.toSet());\n return (Map)((Container)container).get(keys);\n }", "docstring": "/**\n * Obtain the corresponding data source object from the data source container based on the entity's key value.\n *\n * @param container container\n * @param targets targets\n * @return source objects\n */", "url": "https://github.com/opengoofy/crane4j/blob/bfc292cbcb272bc40f8c2fc6295ba9b8ac3efdc1/crane4j-core/src/main/java/cn/crane4j/core/executor/handler/ManyToManyAssembleOperationHandler.java#L49-L58", "sha": "bfc292cbcb272bc40f8c2fc6295ba9b8ac3efdc1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ClassScanner.scan", "code": "public Set> scan(String... basePackages) {\n return scan(Objects::nonNull, basePackages);\n }", "docstring": "/**\n *

Scan all classes under the specified package.\n *\n * @param basePackages the specified package\n * @return a set of classes\n */", "url": "https://github.com/opengoofy/crane4j/blob/bfc292cbcb272bc40f8c2fc6295ba9b8ac3efdc1/crane4j-extension/crane4j-extension-spring/src/main/java/cn/crane4j/extension/spring/scanner/ClassScanner.java#L60-L62", "sha": "bfc292cbcb272bc40f8c2fc6295ba9b8ac3efdc1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SwerveModule.getState", "code": "public SwerveModuleState getState()\n {\n double velocity;\n Rotation2d azimuth;\n if (!SwerveDriveTelemetry.isSimulation)\n {\n velocity = driveVelocityCache.getValue();\n azimuth = Rotation2d.fromDegrees(getAbsolutePosition());\n } else\n {\n return simModule.getState();\n }\n return new SwerveModuleState(velocity, azimuth);\n }", "docstring": "/**\n * Get the Swerve Module state.\n *\n * @return Current SwerveModule state.\n */", "url": "https://github.com/BroncBotz3481/YAGSL/blob/62f8236678090186dbda41719c1cc2e8e6b38448/swervelib/SwerveModule.java#L537-L550", "sha": "62f8236678090186dbda41719c1cc2e8e6b38448"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CANCoderSwerve.getAbsoluteEncoder", "code": "@Override\n public Object getAbsoluteEncoder()\n {\n return encoder;\n }", "docstring": "/**\n * Get the instantiated absolute encoder Object.\n *\n * @return Absolute encoder object.\n */", "url": "https://github.com/BroncBotz3481/YAGSL/blob/62f8236678090186dbda41719c1cc2e8e6b38448/swervelib/encoders/CANCoderSwerve.java#L202-L206", "sha": "62f8236678090186dbda41719c1cc2e8e6b38448"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CanandgyroSwerve.factoryDefault", "code": "@Override\n public void factoryDefault()\n {\n imu.resetFactoryDefaults(STATUS_TIMEOUT_SECONDS);\n }", "docstring": "/**\n * Reset {@link Canandgyro} to factory default.\n */", "url": "https://github.com/BroncBotz3481/YAGSL/blob/62f8236678090186dbda41719c1cc2e8e6b38448/swervelib/imu/CanandgyroSwerve.java#L52-L56", "sha": "62f8236678090186dbda41719c1cc2e8e6b38448"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TikTokensTest.chatCompletionTokensTest", "code": "@Test\n public void chatCompletionTokensTest() {\n ChatCompletion completion = ChatCompletion.builder().messages(messages).build();\n long tokens = completion.tokens();\n log.info(\"Message集合文本:【{}】\", messages, tokens);\n log.info(\"总tokens数{}\", tokens);\n }", "docstring": "/**\n * gpt-3.5和gpt4.0聊天模型接口计算推荐这种方法\n */", "url": "https://github.com/Grt1228/chatgpt-java/blob/ae761b89450812fab4cd7bba6626f9874705ca40/src/test/java/com/unfbx/chatgpt/TikTokensTest.java#L40-L46", "sha": "ae761b89450812fab4cd7bba6626f9874705ca40"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Cffu.get", "code": "@Blocking\n @Nullable\n @Override\n public T get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {\n checkMinimalStage();\n return cf.get(timeout, unit);\n }", "docstring": "/**\n * Waits if necessary for at most the given time for the computation to complete,\n * and then retrieves its result, if available.\n *\n * @param timeout the maximum time to wait\n * @param unit the time unit of the timeout argument\n * @return the computed result\n * @throws CancellationException if the computation was cancelled\n * @throws ExecutionException if the computation threw an exception\n * @throws 
InterruptedException if the current thread was interrupted while waiting\n * @throws TimeoutException if the wait timed out\n */", "url": "https://github.com/foldright/cffu/blob/c4e2d4d6dae280e06a7267b9793e6d3e66c9dcee/cffu-core/src/main/java/io/foldright/cffu/Cffu.java#L2204-L2210", "sha": "c4e2d4d6dae280e06a7267b9793e6d3e66c9dcee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ST_subtree_t.isSameThan", "code": "@Override\n\tpublic boolean isSameThan(__ptr__ other) {\n\t\tST_subtree_t other2 = (ST_subtree_t) other;\n\t\treturn this == other2;\n\t}", "docstring": "/* union find */", "url": "https://github.com/plantuml/plantuml-core/blob/237443772415621f188204ee61201e0d6017015f/plantuml-core/src/main/java/h/ST_subtree_t.java#L15-L19", "sha": "237443772415621f188204ee61201e0d6017015f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UmlSource.getDiagramType", "code": "public DiagramType getDiagramType() {\n\t\treturn DiagramType.getTypeFromArobaseStart(source.get(0).getString());\n\t}", "docstring": "/**\n\t * Retrieve the type of the diagram. This is based on the first line\n\t * @startfoo.\n\t *\n\t * @return the type of the diagram.\n\t */", "url": "https://github.com/plantuml/plantuml-core/blob/237443772415621f188204ee61201e0d6017015f/plantuml-core/src/main/java/net/sourceforge/plantuml/core/UmlSource.java#L111-L113", "sha": "237443772415621f188204ee61201e0d6017015f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "JsonObject.set", "code": "public JsonObject set(String name, double value) {\n\t\tset(name, Json.value(value));\n\t\treturn this;\n\t}", "docstring": "/**\n\t * Sets the value of the member with the specified name to the JSON\n\t * representation of the specified double value. If this object\n\t * does not contain a member with this name, a new member is added at the end of\n\t * the object. If this object contains multiple members with this name, only the\n\t * last one is changed.\n\t *

\n\t * This method should only be used to modify existing objects.\n\t * To fill a new object with members, the method add(name, value)\n\t * should be preferred which is much faster (as it does not need to search for\n\t * existing members).\n\t *

\n\t *\n\t * @param name the name of the member to add\n\t * @param value the value of the member to add\n\t * @return the object itself, to enable method chaining\n\t */", "url": "https://github.com/plantuml/plantuml-core/blob/237443772415621f188204ee61201e0d6017015f/plantuml-core/src/main/java/net/sourceforge/plantuml/json/JsonObject.java#L399-L402", "sha": "237443772415621f188204ee61201e0d6017015f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CropImageView.isShowProgressBar", "code": "public boolean isShowProgressBar() {\n return mShowProgressBar;\n }", "docstring": "/**\n * if to show progress bar when image async loading/cropping is in progress.
\n * default: true, disable to provide custom progress bar UI.\n */", "url": "https://github.com/nammayatri/nammayatri/blob/aa2f5492eaf4a0ab14f3333444ffb78c89ad63c8/Frontend/android-native/mobility-common/src/main/java/in/juspay/mobility/common/cropImage/CropImageView.java#L541-L543", "sha": "aa2f5492eaf4a0ab14f3333444ffb78c89ad63c8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IdentifyTraitsPersistence.getLines", "code": "protected Stream getLines(Path file) throws IOException {\n return Files.lines(file);\n }", "docstring": "/* for testing purposes */", "url": "https://github.com/carlrobertoh/CodeGPT/blob/93c711d5da8edb409e0da641a8bb6438fbda26d4/codegpt-telemetry/src/main/java/ee/carlrobert/codegpt/telemetry/core/service/segment/IdentifyTraitsPersistence.java#L75-L77", "sha": "93c711d5da8edb409e0da641a8bb6438fbda26d4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Capability.getName", "code": "public String getName() { return name; }", "docstring": "/**\n * @return The unique name of this capability, typically this is\n * the fully qualified class name for the target interface.\n */", "url": "https://github.com/CleanroomMC/Cleanroom/blob/fa5a17bf6c3620f4cb30932efb916349d8a96715/src/main/java/net/minecraftforge/common/capabilities/Capability.java#L89-L89", "sha": "fa5a17bf6c3620f4cb30932efb916349d8a96715"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Property.isBooleanValue", "code": "public boolean isBooleanValue()\n {\n return (\"true\".equals(value.toLowerCase()) || \"false\".equals(value.toLowerCase()));\n }", "docstring": "/**\n * Checks if the current value held by this property is a valid boolean value.\n *\n * @return True if it is a boolean value\n */", "url": "https://github.com/CleanroomMC/Cleanroom/blob/fa5a17bf6c3620f4cb30932efb916349d8a96715/src/main/java/net/minecraftforge/common/config/Property.java#L884-L887", "sha": "fa5a17bf6c3620f4cb30932efb916349d8a96715"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AnimationStateMachine.load", "code": "@SideOnly(Side.CLIENT)\n public static IAnimationStateMachine load(IResourceManager manager, ResourceLocation location, ImmutableMap customParameters)\n {\n try (IResource resource = manager.getResource(location))\n {\n ClipResolver clipResolver = new ClipResolver();\n ParameterResolver parameterResolver = new ParameterResolver(customParameters);\n Clips.CommonClipTypeAdapterFactory.INSTANCE.setClipResolver(clipResolver);\n TimeValues.CommonTimeValueTypeAdapterFactory.INSTANCE.setValueResolver(parameterResolver);\n AnimationStateMachine asm = asmGson.fromJson(new InputStreamReader(resource.getInputStream(), StandardCharsets.UTF_8), AnimationStateMachine.class);\n clipResolver.asm = asm;\n parameterResolver.asm = asm;\n asm.initialize();\n //String json = asmGson.toJson(asm);\n //System.out.println(location + \": \" + json);\n return asm;\n }\n catch(IOException | JsonParseException e)\n {\n FMLLog.log.error(\"Exception loading Animation State Machine {}, skipping\", location, e);\n return missing;\n }\n finally\n {\n Clips.CommonClipTypeAdapterFactory.INSTANCE.setClipResolver(null);\n TimeValues.CommonTimeValueTypeAdapterFactory.INSTANCE.setValueResolver(null);\n }\n }", "docstring": "/**\n * Load a new instance if AnimationStateMachine at specified location, with specified custom parameters.\n */", "url": 
"https://github.com/CleanroomMC/Cleanroom/blob/fa5a17bf6c3620f4cb30932efb916349d8a96715/src/main/java/net/minecraftforge/common/model/animation/AnimationStateMachine.java#L216-L243", "sha": "fa5a17bf6c3620f4cb30932efb916349d8a96715"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GameRegistry.getFuelValue", "code": "@Deprecated\n public static int getFuelValue(@Nonnull ItemStack itemStack)\n {\n return ForgeEventFactory.getItemBurnTime(itemStack);\n }", "docstring": "/**\n * @deprecated use {@link ForgeEventFactory#getItemBurnTime(ItemStack)}\n */", "url": "https://github.com/CleanroomMC/Cleanroom/blob/fa5a17bf6c3620f4cb30932efb916349d8a96715/src/main/java/net/minecraftforge/fml/common/registry/GameRegistry.java#L254-L258", "sha": "fa5a17bf6c3620f4cb30932efb916349d8a96715"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TextureLoader.get2Fold", "code": "private static int get2Fold(int fold) {\n int ret = 2;\n while (ret < fold) {\n ret *= 2;\n }\n return ret;\n }", "docstring": "/**\n * Get the closest greater power of 2 to the fold number\n *\n * @param fold The target number\n * @return The power of 2\n */", "url": "https://github.com/CleanroomMC/Cleanroom/blob/fa5a17bf6c3620f4cb30932efb916349d8a96715/src/main/java/org/lwjgl/test/spaceinvaders/TextureLoader.java#L186-L192", "sha": "fa5a17bf6c3620f4cb30932efb916349d8a96715"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Matrix4f.transpose", "code": "public Matrix transpose() {\n return transpose(this);\n }", "docstring": "/**\n * Transpose this matrix\n * \n * @return this\n */", "url": "https://github.com/CleanroomMC/Cleanroom/blob/fa5a17bf6c3620f4cb30932efb916349d8a96715/src/main/java/org/lwjgl/util/vector/Matrix4f.java#L431-L433", "sha": "fa5a17bf6c3620f4cb30932efb916349d8a96715"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Vector2f.getY", "code": "public final float getY() {\n return y;\n }", "docstring": "/**\n * @return y\n */", "url": "https://github.com/CleanroomMC/Cleanroom/blob/fa5a17bf6c3620f4cb30932efb916349d8a96715/src/main/java/org/lwjgl/util/vector/Vector2f.java#L253-L255", "sha": "fa5a17bf6c3620f4cb30932efb916349d8a96715"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SwaggerConfig.createRestApi", "code": "@Bean\n public Docket createRestApi() {\n return new Docket(DocumentationType.OAS_30)\n // 是否启用Swagger\n .enable(enabled)\n // 用来创建该API的基本信息,展示在文档的页面中(自定义展示的信息)\n .apiInfo(apiInfo())\n // 设置哪些接口暴露给Swagger展示\n .select()\n // 扫描所有有注解的api,用这种方式更灵活\n .apis(RequestHandlerSelectors.withMethodAnnotation(Operation.class))\n // 扫描指定包中的swagger注解\n // .apis(RequestHandlerSelectors.basePackage(\"com.ruoyi.project.tool.swagger\"))\n // 扫描所有 .apis(RequestHandlerSelectors.any())\n .paths(PathSelectors.any())\n .build()\n /* 设置安全模式,swagger可以设置访问token */\n .securitySchemes(securitySchemes())\n .securityContexts(securityContexts());\n }", "docstring": "/**\n * 创建API\n */", "url": "https://github.com/oddfar/campus/blob/b1ef73868f9003f932087957cd4ef6cd4b63c1e8/campus-admin/src/main/java/com/oddfar/campus/admin/config/SwaggerConfig.java#L43-L62", "sha": "b1ef73868f9003f932087957cd4ef6cd4b63c1e8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Convert.toDouble", "code": "public static Double toDouble(Object value) {\n return toDouble(value, null);\n }", "docstring": "/**\n * 转换为double
\n * 如果给定的值为空,或者转换失败,返回默认值null
\n * 转换失败不会报错\n *\n * @param value 被转换的值\n * @return 结果\n */", "url": "https://github.com/oddfar/campus/blob/b1ef73868f9003f932087957cd4ef6cd4b63c1e8/campus-common/src/main/java/com/oddfar/campus/common/core/text/Convert.java#L411-L413", "sha": "b1ef73868f9003f932087957cd4ef6cd4b63c1e8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StringUtils.ishttp", "code": "public static boolean ishttp(String link) {\n return StringUtils.startsWithAny(link, Constants.HTTP, Constants.HTTPS);\n }", "docstring": "/**\n * 是否为http(s)://开头\n *\n * @param link 链接\n * @return 结果\n */", "url": "https://github.com/oddfar/campus/blob/b1ef73868f9003f932087957cd4ef6cd4b63c1e8/campus-common/src/main/java/com/oddfar/campus/common/utils/StringUtils.java#L242-L244", "sha": "b1ef73868f9003f932087957cd4ef6cd4b63c1e8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MyWebMvcConfig.addResourceHandlers", "code": "@Override\n public void addResourceHandlers(ResourceHandlerRegistry registry) {\n\n /** 本地文件上传路径 */\n registry.addResourceHandler(Constants.RESOURCE_PREFIX + \"/**\")\n .addResourceLocations(\"file:\" + ConfigExpander.getFileProfile() + \"/\");\n\n /** swagger配置 */\n registry.addResourceHandler(\"/swagger-ui/**\")\n .addResourceLocations(\"classpath:/META-INF/resources/webjars/springfox-swagger-ui/\");\n }", "docstring": "/**\n * 映射到访问本地的资源文件\n *\n * @param registry\n */", "url": "https://github.com/oddfar/campus/blob/b1ef73868f9003f932087957cd4ef6cd4b63c1e8/campus-framework/src/main/java/com/oddfar/campus/framework/config/MyWebMvcConfig.java#L32-L42", "sha": "b1ef73868f9003f932087957cd4ef6cd4b63c1e8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "JavaCodeEngine.execRequest", "code": "public boolean execRequest(Request request, String code) throws Exception {\n if (StringUtils.isEmpty(code)) {\n return true;\n }\n InMemoryJavaCompiler memoryJavaCompiler = javac(prependPublicToCoolRequestScript(REQUEST_REGEX, code), REQUEST_CLASS);\n Class requestClass = memoryJavaCompiler.getClassloader().loadClass(REQUEST_CLASS);\n return invokeRequest(requestClass, request);\n }", "docstring": "/**\n * 执行前置脚本\n *\n * @return 如果返回false,则表示被脚本拒绝执行\n */", "url": "https://github.com/houxinlin/cool-request/blob/177c5f7de26291eacc0b28c5ade7e470886b0348/src/main/java/com/cool/request/components/http/script/JavaCodeEngine.java#L71-L78", "sha": "177c5f7de26291eacc0b28c5ade7e470886b0348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Schema.set$vocabulary", "code": "public void set$vocabulary(String $vocabulary) {\n this.$vocabulary = $vocabulary;\n }", "docstring": "/**\n *\n * @since 2.2.8 (OpenAPI 3.1.0)\n */", "url": "https://github.com/houxinlin/cool-request/blob/177c5f7de26291eacc0b28c5ade7e470886b0348/src/main/java/com/cool/request/lib/openapi/media/Schema.java#L521-L523", "sha": "177c5f7de26291eacc0b28c5ade7e470886b0348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Schema.getXml", "code": "public XML getXml() {\n return xml;\n }", "docstring": "/**\n * returns the xml property from a Schema instance.\n *\n * @return XML xml\n **/", "url": "https://github.com/houxinlin/cool-request/blob/177c5f7de26291eacc0b28c5ade7e470886b0348/src/main/java/com/cool/request/lib/openapi/media/Schema.java#L1466-L1468", "sha": "177c5f7de26291eacc0b28c5ade7e470886b0348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"Assert.isNull", "code": "public static void isNull( Object object, String message) {\n if (object != null) {\n throw new IllegalArgumentException(message);\n }\n }", "docstring": "/**\n * Assert that an object is {@code null}.\n *
Assert.isNull(value, \"The value must be null\");
\n * @param object the object to check\n * @param message the exception message to use if the assertion fails\n * @throws IllegalArgumentException if the object is not {@code null}\n */", "url": "https://github.com/houxinlin/cool-request/blob/177c5f7de26291eacc0b28c5ade7e470886b0348/src/main/java/com/cool/request/utils/Assert.java#L163-L167", "sha": "177c5f7de26291eacc0b28c5ade7e470886b0348"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AttackResultOuterClass.AttackResult.getUnk3300BALCPHMFIBC", "code": "@java.lang.Override\n public int getUnk3300BALCPHMFIBC() {\n return unk3300BALCPHMFIBC_;\n }", "docstring": "/**\n * uint32 Unk3300_BALCPHMFIBC = 1940;\n * @return The unk3300BALCPHMFIBC.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/AttackResultOuterClass.java#L521-L524", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AttackResultOuterClass.AttackResult.getTargetType", "code": "@java.lang.Override\n public int getTargetType() {\n return targetType_;\n }", "docstring": "/**\n * uint32 targetType = 346;\n * @return The targetType.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/AttackResultOuterClass.java#L624-L627", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ModifierPropertyOuterClass.ModifierProperty.getKey", "code": "@java.lang.Override\n public emu.grasscutter.net.proto.AbilityStringOuterClass.AbilityString getKey() {\n return key_ == null ? emu.grasscutter.net.proto.AbilityStringOuterClass.AbilityString.getDefaultInstance() : key_;\n }", "docstring": "/**\n * .AbilityString key = 7;\n * @return The key.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ModifierPropertyOuterClass.java#L151-L154", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ModifierProperty.Builder.getKey", "code": "public emu.grasscutter.net.proto.AbilityStringOuterClass.AbilityString getKey() {\n if (keyBuilder_ == null) {\n return key_ == null ? 
emu.grasscutter.net.proto.AbilityStringOuterClass.AbilityString.getDefaultInstance() : key_;\n } else {\n return keyBuilder_.getMessage();\n }\n }", "docstring": "/**\n * .AbilityString key = 7;\n * @return The key.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ModifierPropertyOuterClass.java#L522-L528", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RoutePoint.Builder.setPosition", "code": "public Builder setPosition(\n emu.grasscutter.net.proto.VectorOuterClass.Vector.Builder builderForValue) {\n if (positionBuilder_ == null) {\n position_ = builderForValue.build();\n onChanged();\n } else {\n positionBuilder_.setMessage(builderForValue.build());\n }\n\n return this;\n }", "docstring": "/**\n * .Vector position = 1;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/RoutePointOuterClass.java#L1124-L1134", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EvtCombatSteerMotionInfo.Builder.clearFaceDir", "code": "public Builder clearFaceDir() {\n if (faceDirBuilder_ == null) {\n faceDir_ = null;\n onChanged();\n } else {\n faceDir_ = null;\n faceDirBuilder_ = null;\n }\n\n return this;\n }", "docstring": "/**\n * .Vector face_dir = 5;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/EvtCombatSteerMotionInfoOuterClass.java#L746-L756", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TreasureMapRegionInfo.Builder.getRegionCenterPosOrBuilder", "code": "public emu.grasscutter.net.proto.VectorOuterClass.VectorOrBuilder getRegionCenterPosOrBuilder() {\n if (regionCenterPosBuilder_ != null) {\n return regionCenterPosBuilder_.getMessageOrBuilder();\n } else {\n return regionCenterPos_ == null ?\n emu.grasscutter.net.proto.VectorOuterClass.Vector.getDefaultInstance() : regionCenterPos_;\n }\n }", "docstring": "/**\n * .Vector regionCenterPos = 13;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/TreasureMapRegionInfoOuterClass.java#L1123-L1130", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DoGachaRspOuterClass.DoGachaRsp.getWishProgress", "code": "@java.lang.Override\n public int getWishProgress() {\n return wishProgress_;\n }", "docstring": "/**\n * uint32 wishProgress = 12;\n * @return The wishProgress.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/DoGachaRspOuterClass.java#L566-L569", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AnimatorParameterValueInfoOuterClass.AnimatorParameterValueInfo.hasIntVal", "code": "@java.lang.Override\n public boolean hasIntVal() {\n return mMJOJNNFGCNCase_ == 2;\n }", "docstring": "/**\n * int32 int_val = 2;\n * @return Whether the intVal field is set.\n */", "url": 
"https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/AnimatorParameterValueInfoOuterClass.java#L218-L221", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DungeonSettleExhibitionInfo.Builder.getCardList", "code": "public emu.grasscutter.net.proto.ExhibitionDisplayInfoOuterClass.ExhibitionDisplayInfo getCardList(int index) {\n if (cardListBuilder_ == null) {\n return cardList_.get(index);\n } else {\n return cardListBuilder_.getMessage(index);\n }\n }", "docstring": "/**\n * repeated .ExhibitionDisplayInfo cardList = 2;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/DungeonSettleExhibitionInfoOuterClass.java#L637-L643", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ResinChangeNotifyOuterClass.ResinChangeNotify.getNextAddTimestamp", "code": "@java.lang.Override\n public int getNextAddTimestamp() {\n return nextAddTimestamp_;\n }", "docstring": "/**\n * uint32 nextAddTimestamp = 4;\n * @return The nextAddTimestamp.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ResinChangeNotifyOuterClass.java#L279-L282", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ShowEquip.Builder.getWeaponBuilder", "code": "public emu.grasscutter.net.proto.WeaponOuterClass.Weapon.Builder getWeaponBuilder() {\n return getWeaponFieldBuilder().getBuilder();\n }", "docstring": "/**\n * .Weapon weapon = 3;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ShowEquipOuterClass.java#L943-L945", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SceneTeamAvatar.Builder.getWeaponAbilityInfo", "code": "public emu.grasscutter.net.proto.AbilitySyncStateInfoOuterClass.AbilitySyncStateInfo getWeaponAbilityInfo() {\n if (weaponAbilityInfoBuilder_ == null) {\n return weaponAbilityInfo_ == null ? 
emu.grasscutter.net.proto.AbilitySyncStateInfoOuterClass.AbilitySyncStateInfo.getDefaultInstance() : weaponAbilityInfo_;\n } else {\n return weaponAbilityInfoBuilder_.getMessage();\n }\n }", "docstring": "/**\n * .AbilitySyncStateInfo weaponAbilityInfo = 6;\n * @return The weaponAbilityInfo.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/SceneTeamAvatarOuterClass.java#L1906-L1912", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DetectorData.Builder.clearMaterialId", "code": "public Builder clearMaterialId() {\n \n materialId_ = 0;\n onChanged();\n return this;\n }", "docstring": "/**\n * uint32 materialId = 10;\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/DetectorDataOuterClass.java#L908-L913", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RoguelikeDungeonActivityDetailInfoOuterClass.RoguelikeDungeonActivityDetailInfo.getRuneListList", "code": "@java.lang.Override\n public java.util.List\n getRuneListList() {\n return runeList_;\n }", "docstring": "/**\n * repeated uint32 rune_list = 3;\n * @return A list containing the runeList.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/RoguelikeDungeonActivityDetailInfoOuterClass.java#L364-L368", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RoguelikeDungeonActivityDetailInfo.Builder.setContentCloseTime", "code": "public Builder setContentCloseTime(int value) {\n \n contentCloseTime_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * uint32 content_close_time = 11;\n * @param value The contentCloseTime to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/RoguelikeDungeonActivityDetailInfoOuterClass.java#L1291-L1296", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TreasureSeelieActivityDetailInfo.Builder.getRegionDataListBuilder", "code": "public emu.grasscutter.net.proto.TreasureSeelieRegionDataOuterClass.TreasureSeelieRegionData.Builder getRegionDataListBuilder(\n int index) {\n return getRegionDataListFieldBuilder().getBuilder(index);\n }", "docstring": "/**\n * repeated .TreasureSeelieRegionData region_data_list = 6;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/TreasureSeelieActivityDetailInfoOuterClass.java#L837-L840", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BounceConjuringActivityDetailInfo.Builder.addChapterInfoListBuilder", "code": "public emu.grasscutter.net.proto.BounceConjuringChapterInfoOuterClass.BounceConjuringChapterInfo.Builder addChapterInfoListBuilder(\n int index) {\n return getChapterInfoListFieldBuilder().addBuilder(\n index, 
emu.grasscutter.net.proto.BounceConjuringChapterInfoOuterClass.BounceConjuringChapterInfo.getDefaultInstance());\n }", "docstring": "/**\n * repeated .BounceConjuringChapterInfo chapter_info_list = 15;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/BounceConjuringActivityDetailInfoOuterClass.java#L841-L845", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MEIDIANHEDD.Builder.addStageInfoList", "code": "public Builder addStageInfoList(\n int index, emu.grasscutter.net.proto.OMNAAMAIBOHOuterClass.OMNAAMAIBOH.Builder builderForValue) {\n if (stageInfoListBuilder_ == null) {\n ensureStageInfoListIsMutable();\n stageInfoList_.add(index, builderForValue.build());\n onChanged();\n } else {\n stageInfoListBuilder_.addMessage(index, builderForValue.build());\n }\n return this;\n }", "docstring": "/**\n * repeated .OMNAAMAIBOH stageInfoList = 12;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/MEIDIANHEDDOuterClass.java#L641-L651", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SamLampInfoOuterClass.SamLampInfo.getProgress", "code": "@java.lang.Override\n public int getProgress() {\n return progress_;\n }", "docstring": "/**\n * uint32 progress = 3;\n * @return The progress.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/SamLampInfoOuterClass.java#L255-L258", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SumoStageData.Builder.setTeamList", "code": "public Builder setTeamList(\n int index, emu.grasscutter.net.proto.SumoTeamDataOuterClass.SumoTeamData value) {\n if (teamListBuilder_ == null) {\n if (value == null) {\n throw new NullPointerException();\n }\n ensureTeamListIsMutable();\n teamList_.set(index, value);\n onChanged();\n } else {\n teamListBuilder_.setMessage(index, value);\n }\n return this;\n }", "docstring": "/**\n * repeated .SumoTeamData team_list = 1;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/SumoStageDataOuterClass.java#L782-L795", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WinterCampInfo.Builder.setRaceInfoList", "code": "public Builder setRaceInfoList(\n int index, emu.grasscutter.net.proto.WinterCampRaceStageInfoOuterClass.WinterCampRaceStageInfo.Builder builderForValue) {\n if (raceInfoListBuilder_ == null) {\n ensureRaceInfoListIsMutable();\n raceInfoList_.set(index, builderForValue.build());\n onChanged();\n } else {\n raceInfoListBuilder_.setMessage(index, builderForValue.build());\n }\n return this;\n }", "docstring": "/**\n * repeated .WinterCampRaceStageInfo raceInfoList = 5;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/WinterCampInfoOuterClass.java#L1263-L1273", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", 
"lang": "", "func_name": "WinterCampInfo.Builder.getExploreInfo", "code": "public emu.grasscutter.net.proto.WinterCampStageInfoOuterClass.WinterCampStageInfo getExploreInfo() {\n if (exploreInfoBuilder_ == null) {\n return exploreInfo_ == null ? emu.grasscutter.net.proto.WinterCampStageInfoOuterClass.WinterCampStageInfo.getDefaultInstance() : exploreInfo_;\n } else {\n return exploreInfoBuilder_.getMessage();\n }\n }", "docstring": "/**\n * .WinterCampStageInfo exploreInfo = 1;\n * @return The exploreInfo.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/WinterCampInfoOuterClass.java#L1853-L1859", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AsterActivityDetailInfoOuterClass.AsterActivityDetailInfo.getUnk3300KFDGFFPJGJC", "code": "@java.lang.Override\n public int getUnk3300KFDGFFPJGJC() {\n return unk3300KFDGFFPJGJC_;\n }", "docstring": "/**\n * uint32 Unk3300_KFDGFFPJGJC = 4;\n * @return The unk3300KFDGFFPJGJC.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/AsterActivityDetailInfoOuterClass.java#L356-L359", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ItemAddHintNotify.Builder.setUnk3300KIOMJNHFMDE", "code": "public Builder setUnk3300KIOMJNHFMDE(boolean value) {\n \n unk3300KIOMJNHFMDE_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * bool Unk3300_KIOMJNHFMDE = 11;\n * @param value The unk3300KIOMJNHFMDE to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ItemAddHintNotifyOuterClass.java#L1800-L1805", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OpActivityInfo.Builder.setIsHasChange", "code": "public Builder setIsHasChange(boolean value) {\n \n isHasChange_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * bool isHasChange = 3;\n * @param value The isHasChange to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/OpActivityInfoOuterClass.java#L845-L850", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OpActivityInfo.Builder.getBonusInfoOrBuilder", "code": "@java.lang.Override\n public emu.grasscutter.net.proto.BonusInfoOuterClass.BonusInfoOrBuilder getBonusInfoOrBuilder() {\n if ((detailCase_ == 11) && (bonusInfoBuilder_ != null)) {\n return bonusInfoBuilder_.getMessageOrBuilder();\n } else {\n if (detailCase_ == 11) {\n return (emu.grasscutter.net.proto.BonusInfoOuterClass.BonusInfo) detail_;\n }\n return emu.grasscutter.net.proto.BonusInfoOuterClass.BonusInfo.getDefaultInstance();\n }\n }", "docstring": "/**\n * .BonusInfo bonus_info = 11;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/OpActivityInfoOuterClass.java#L970-L980", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} 
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PlantFlowerInfo.Builder.setDayIndex", "code": "public Builder setDayIndex(int value) {\n \n dayIndex_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * uint32 dayIndex = 8;\n * @param value The dayIndex to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/PlantFlowerInfoOuterClass.java#L1036-L1041", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WinterCampRecvItemNotify.Builder.getRecvItemData", "code": "public emu.grasscutter.net.proto.WinterCampRecvItemDataOuterClass.WinterCampRecvItemData getRecvItemData() {\n if (recvItemDataBuilder_ == null) {\n return recvItemData_ == null ? emu.grasscutter.net.proto.WinterCampRecvItemDataOuterClass.WinterCampRecvItemData.getDefaultInstance() : recvItemData_;\n } else {\n return recvItemDataBuilder_.getMessage();\n }\n }", "docstring": "/**\n * .WinterCampRecvItemData recvItemData = 5;\n * @return The recvItemData.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/WinterCampRecvItemNotifyOuterClass.java#L612-L618", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GetAllMailRsp.Builder.setIsTruncated", "code": "public Builder setIsTruncated(boolean value) {\n \n isTruncated_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * bool isTruncated = 8;\n * @param value The isTruncated to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/GetAllMailRspOuterClass.java#L1084-L1089", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HomeBlockArrangementInfo.Builder.addDeployFurniureListBuilder", "code": "public emu.grasscutter.net.proto.HomeFurnitureDataOuterClass.HomeFurnitureData.Builder addDeployFurniureListBuilder(\n int index) {\n return getDeployFurniureListFieldBuilder().addBuilder(\n index, emu.grasscutter.net.proto.HomeFurnitureDataOuterClass.HomeFurnitureData.getDefaultInstance());\n }", "docstring": "/**\n * repeated .HomeFurnitureData deployFurniureList = 15;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/HomeBlockArrangementInfoOuterClass.java#L3508-L3512", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PlayerWidgetInfoOuterClass.PlayerWidgetInfo.getUid", "code": "@java.lang.Override\n public int getUid() {\n return uid_;\n }", "docstring": "/**\n * uint32 uid = 10;\n * @return The uid.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/PlayerWidgetInfoOuterClass.java#L193-L196", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "VintageBoothUsedItemData.Builder.clearIsOpen", "code": "public 
Builder clearIsOpen() {\n \n isOpen_ = false;\n onChanged();\n return this;\n }", "docstring": "/**\n * bool isOpen = 2;\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/VintageBoothUsedItemDataOuterClass.java#L517-L522", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ActivityInfoOuterClass.ActivityInfo.getLuminanceStoneChallengeInfoOrBuilder", "code": "@java.lang.Override\n public emu.grasscutter.net.proto.LuminanceStoneChallengeInfoOuterClass.LuminanceStoneChallengeInfoOrBuilder getLuminanceStoneChallengeInfoOrBuilder() {\n if (iKJIPGGMNBKCase_ == 142) {\n return (emu.grasscutter.net.proto.LuminanceStoneChallengeInfoOuterClass.LuminanceStoneChallengeInfo) iKJIPGGMNBK_;\n }\n return emu.grasscutter.net.proto.LuminanceStoneChallengeInfoOuterClass.LuminanceStoneChallengeInfo.getDefaultInstance();\n }", "docstring": "/**\n * .LuminanceStoneChallengeInfo luminance_stone_challenge_info = 142;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ActivityInfoOuterClass.java#L4751-L4757", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ActivityInfo.Builder.mergeIrodoriInfo", "code": "public Builder mergeIrodoriInfo(emu.grasscutter.net.proto.IrodoriInfoOuterClass.IrodoriInfo value) {\n if (irodoriInfoBuilder_ == null) {\n if (iKJIPGGMNBKCase_ == 1014 &&\n iKJIPGGMNBK_ != emu.grasscutter.net.proto.IrodoriInfoOuterClass.IrodoriInfo.getDefaultInstance()) {\n iKJIPGGMNBK_ = emu.grasscutter.net.proto.IrodoriInfoOuterClass.IrodoriInfo.newBuilder((emu.grasscutter.net.proto.IrodoriInfoOuterClass.IrodoriInfo) iKJIPGGMNBK_)\n .mergeFrom(value).buildPartial();\n } else {\n iKJIPGGMNBK_ = value;\n }\n onChanged();\n } else {\n if (iKJIPGGMNBKCase_ == 1014) {\n irodoriInfoBuilder_.mergeFrom(value);\n }\n irodoriInfoBuilder_.setMessage(value);\n }\n iKJIPGGMNBKCase_ = 1014;\n return this;\n }", "docstring": "/**\n * .IrodoriInfo irodori_info = 1014;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ActivityInfoOuterClass.java#L15674-L15692", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FungusTrainingProgressDetailOuterClass.FungusTrainingProgressDetail.getMonsterPoolPreviewList", "code": "@java.lang.Override\n public emu.grasscutter.net.proto.FungusTrainingPoolPreviewDetailOuterClass.FungusTrainingPoolPreviewDetail getMonsterPoolPreviewList(int index) {\n return monsterPoolPreviewList_.get(index);\n }", "docstring": "/**\n * repeated .FungusTrainingPoolPreviewDetail monsterPoolPreviewList = 9;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/FungusTrainingProgressDetailOuterClass.java#L384-L387", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EffigyActivityDetailInfo.Builder.addDailyInfoList", "code": "public Builder addDailyInfoList(\n int index, 
emu.grasscutter.net.proto.EffigyDailyInfoOuterClass.EffigyDailyInfo value) {\n if (dailyInfoListBuilder_ == null) {\n if (value == null) {\n throw new NullPointerException();\n }\n ensureDailyInfoListIsMutable();\n dailyInfoList_.add(index, value);\n onChanged();\n } else {\n dailyInfoListBuilder_.addMessage(index, value);\n }\n return this;\n }", "docstring": "/**\n * repeated .EffigyDailyInfo daily_info_list = 4;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/EffigyActivityDetailInfoOuterClass.java#L944-L957", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HomeLimitedShopBuyGoodsRspOuterClass.HomeLimitedShopBuyGoodsRsp.getBuyCount", "code": "@java.lang.Override\n public int getBuyCount() {\n return buyCount_;\n }", "docstring": "/**\n * uint32 buyCount = 11;\n * @return The buyCount.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/HomeLimitedShopBuyGoodsRspOuterClass.java#L389-L392", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CrystalLinkDungeonSettleInfoOuterClass.CrystalLinkDungeonSettleInfo.getDifficultyId", "code": "@java.lang.Override\n public int getDifficultyId() {\n return difficultyId_;\n }", "docstring": "/**\n * uint32 difficulty_id = 3;\n * @return The difficultyId.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/CrystalLinkDungeonSettleInfoOuterClass.java#L203-L206", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GetCompoundDataRsp.Builder.setRetcode", "code": "public Builder setRetcode(int value) {\n \n retcode_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * int32 retcode = 2;\n * @param value The retcode to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/GetCompoundDataRspOuterClass.java#L817-L822", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MpPlayRewardInfo.Builder.setRemainUidList", "code": "public Builder setRemainUidList(\n int index, int value) {\n ensureRemainUidListIsMutable();\n remainUidList_.setInt(index, value);\n onChanged();\n return this;\n }", "docstring": "/**\n * repeated uint32 remainUidList = 2;\n * @param index The index to set the value at.\n * @param value The remainUidList to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/MpPlayRewardInfoOuterClass.java#L722-L728", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MusicGameStartRsp.Builder.setUgcGuid", "code": "public Builder setUgcGuid(long value) {\n \n ugcGuid_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * uint64 ugcGuid = 8;\n * @param value The ugcGuid to set.\n * @return This builder for 
chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/MusicGameStartRspOuterClass.java#L677-L682", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ProfilePictureChangeNotify.Builder.getProfilePicture", "code": "public emu.grasscutter.net.proto.ProfilePictureOuterClass.ProfilePicture getProfilePicture() {\n if (profilePictureBuilder_ == null) {\n return profilePicture_ == null ? emu.grasscutter.net.proto.ProfilePictureOuterClass.ProfilePicture.getDefaultInstance() : profilePicture_;\n } else {\n return profilePictureBuilder_.getMessage();\n }\n }", "docstring": "/**\n * .ProfilePicture profilePicture = 13;\n * @return The profilePicture.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ProfilePictureChangeNotifyOuterClass.java#L620-L626", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SceneGadgetInfo.Builder.setGadgetState", "code": "public Builder setGadgetState(int value) {\n \n gadgetState_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * uint32 gadgetState = 6;\n * @param value The gadgetState to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/SceneGadgetInfoOuterClass.java#L3469-L3474", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GadgetInfoOuterClass.GadgetInfo.getStatueGadgetOrBuilder", "code": "@java.lang.Override\n public emu.grasscutter.net.proto.StatueGadgetInfoOuterClass.StatueGadgetInfoOrBuilder getStatueGadgetOrBuilder() {\n if (contentCase_ == 19) {\n return (emu.grasscutter.net.proto.StatueGadgetInfoOuterClass.StatueGadgetInfo) content_;\n }\n return emu.grasscutter.net.proto.StatueGadgetInfoOuterClass.StatueGadgetInfo.getDefaultInstance();\n }", "docstring": "/**\n * .StatueGadgetInfo statue_gadget = 19;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/GadgetInfoOuterClass.java#L1428-L1434", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GravenInnocenceDetailInfo.Builder.setPhotoInfo", "code": "public Builder setPhotoInfo(emu.grasscutter.net.proto.GravenInnocencePhotoInfoOuterClass.GravenInnocencePhotoInfo value) {\n if (photoInfoBuilder_ == null) {\n if (value == null) {\n throw new NullPointerException();\n }\n photoInfo_ = value;\n onChanged();\n } else {\n photoInfoBuilder_.setMessage(value);\n }\n\n return this;\n }", "docstring": "/**\n * .GravenInnocencePhotoInfo photo_info = 10;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/GravenInnocenceDetailInfoOuterClass.java#L1171-L1183", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GravenInnocenceDetailInfo.Builder.setPhotoInfo", "code": "public Builder setPhotoInfo(\n 
emu.grasscutter.net.proto.GravenInnocencePhotoInfoOuterClass.GravenInnocencePhotoInfo.Builder builderForValue) {\n if (photoInfoBuilder_ == null) {\n photoInfo_ = builderForValue.build();\n onChanged();\n } else {\n photoInfoBuilder_.setMessage(builderForValue.build());\n }\n\n return this;\n }", "docstring": "/**\n * .GravenInnocencePhotoInfo photo_info = 10;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/GravenInnocenceDetailInfoOuterClass.java#L1187-L1197", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RoguelikeDungeonSettleInfoOuterClass.RoguelikeDungeonSettleInfo.getStageId", "code": "@java.lang.Override\n public int getStageId() {\n return stageId_;\n }", "docstring": "/**\n * uint32 stageId = 10;\n * @return The stageId.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/RoguelikeDungeonSettleInfoOuterClass.java#L274-L277", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AvatarFollowRouteNotify.Builder.getClientParams", "code": "public java.lang.String getClientParams() {\n java.lang.Object ref = clientParams_;\n if (!(ref instanceof java.lang.String)) {\n com.google.protobuf.ByteString bs =\n (com.google.protobuf.ByteString) ref;\n java.lang.String s = bs.toStringUtf8();\n clientParams_ = s;\n return s;\n } else {\n return (java.lang.String) ref;\n }\n }", "docstring": "/**\n * string clientParams = 10;\n * @return The clientParams.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/AvatarFollowRouteNotifyOuterClass.java#L856-L867", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BlossomBriefInfoNotify.Builder.getBriefInfoListBuilder", "code": "public emu.grasscutter.net.proto.BlossomBriefInfoOuterClass.BlossomBriefInfo.Builder getBriefInfoListBuilder(\n int index) {\n return getBriefInfoListFieldBuilder().getBuilder(index);\n }", "docstring": "/**\n * repeated .BlossomBriefInfo briefInfoList = 9;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/BlossomBriefInfoNotifyOuterClass.java#L835-L838", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LaunchFireworksReq.Builder.setSchemeData", "code": "public Builder setSchemeData(emu.grasscutter.net.proto.FireworksLaunchSchemeDataOuterClass.FireworksLaunchSchemeData value) {\n if (schemeDataBuilder_ == null) {\n if (value == null) {\n throw new NullPointerException();\n }\n schemeData_ = value;\n onChanged();\n } else {\n schemeDataBuilder_.setMessage(value);\n }\n\n return this;\n }", "docstring": "/**\n * .FireworksLaunchSchemeData schemeData = 1;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/LaunchFireworksReqOuterClass.java#L630-L642", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", 
"func_name": "MonsterPointArrayRouteUpdateNotifyOuterClass.MonsterPointArrayRouteUpdateNotify.getMonsterRouteOrBuilder", "code": "@java.lang.Override\n public emu.grasscutter.net.proto.MonsterRouteOuterClass.MonsterRouteOrBuilder getMonsterRouteOrBuilder() {\n return getMonsterRoute();\n }", "docstring": "/**\n * .MonsterRoute monsterRoute = 2;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/MonsterPointArrayRouteUpdateNotifyOuterClass.java#L300-L303", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RogueDiaryInfoOuterClass.RogueDiaryInfo.getIsHaveProgress", "code": "@java.lang.Override\n public boolean getIsHaveProgress() {\n return isHaveProgress_;\n }", "docstring": "/**\n * bool isHaveProgress = 13;\n * @return The isHaveProgress.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/RogueDiaryInfoOuterClass.java#L269-L272", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ServerAnnounceRevokeNotify.Builder.addConfigIdList", "code": "public Builder addConfigIdList(int value) {\n ensureConfigIdListIsMutable();\n configIdList_.addInt(value);\n onChanged();\n return this;\n }", "docstring": "/**\n * repeated uint32 configIdList = 12;\n * @param value The configIdList to add.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ServerAnnounceRevokeNotifyOuterClass.java#L681-L686", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ServerBuffChangeNotifyOuterClass.ServerBuffChangeNotify.getServerBuffListOrBuilderList", "code": "@java.lang.Override\n public java.util.List \n getServerBuffListOrBuilderList() {\n return serverBuffList_;\n }", "docstring": "/**\n * repeated .ServerBuff serverBuffList = 12;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ServerBuffChangeNotifyOuterClass.java#L369-L373", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ServerBuffChangeNotifyOuterClass.ServerBuffChangeNotify.getAvatarGuidListCount", "code": "public int getAvatarGuidListCount() {\n return avatarGuidList_.size();\n }", "docstring": "/**\n * repeated uint64 avatarGuidList = 10;\n * @return The count of avatarGuidList.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ServerBuffChangeNotifyOuterClass.java#L412-L414", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ShopConcertProductOuterClass.ShopConcertProduct.getObtainLimit", "code": "@java.lang.Override\n public int getObtainLimit() {\n return obtainLimit_;\n }", "docstring": "/**\n * uint32 obtainLimit = 4;\n * @return The obtainLimit.\n */", "url": 
"https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ShopConcertProductOuterClass.java#L284-L287", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DailyTaskFilterCityRsp.Builder.setRetcode", "code": "public Builder setRetcode(int value) {\n \n retcode_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * int32 retcode = 5;\n * @param value The retcode to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/DailyTaskFilterCityRspOuterClass.java#L495-L500", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GetPlayerAskFriendListRspOuterClass.GetPlayerAskFriendListRsp.getAskFriendList", "code": "@java.lang.Override\n public emu.grasscutter.net.proto.FriendBriefOuterClass.FriendBrief getAskFriendList(int index) {\n return askFriendList_.get(index);\n }", "docstring": "/**\n * repeated .FriendBrief askFriendList = 6;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/GetPlayerAskFriendListRspOuterClass.java#L316-L319", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BlossomChestOuterClass.BlossomChest.getRemainUidListList", "code": "@java.lang.Override\n public java.util.List\n getRemainUidListList() {\n return remainUidList_;\n }", "docstring": "/**\n * repeated uint32 remainUidList = 3;\n * @return A list containing the remainUidList.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/BlossomChestOuterClass.java#L272-L276", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WinterCampAcceptItemResultInfo.Builder.clearUnk3300ILELLFNPDFJ", "code": "public Builder clearUnk3300ILELLFNPDFJ() {\n if (unk3300ILELLFNPDFJBuilder_ == null) {\n unk3300ILELLFNPDFJ_ = java.util.Collections.emptyList();\n bitField0_ = (bitField0_ & ~0x00000001);\n onChanged();\n } else {\n unk3300ILELLFNPDFJBuilder_.clear();\n }\n return this;\n }", "docstring": "/**\n * repeated .ItemParam Unk3300_ILELLFNPDFJ = 9;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/WinterCampAcceptItemResultInfoOuterClass.java#L872-L881", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BlitzRushInfoOuterClass.BlitzRushInfo.getParkourLevelInfoListOrBuilderList", "code": "@java.lang.Override\n public java.util.List \n getParkourLevelInfoListOrBuilderList() {\n return parkourLevelInfoList_;\n }", "docstring": "/**\n * repeated .ParkourLevelInfo parkourLevelInfoList = 6;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/BlitzRushInfoOuterClass.java#L258-L262", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": 
"github_2023", "owner": "", "lang": "", "func_name": "HomeSceneArrangementInfo.Builder.clearMainHouse", "code": "public Builder clearMainHouse() {\n if (mainHouseBuilder_ == null) {\n mainHouse_ = null;\n onChanged();\n } else {\n mainHouse_ = null;\n mainHouseBuilder_ = null;\n }\n\n return this;\n }", "docstring": "/**\n * .HomeFurnitureData mainHouse = 6;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/HomeSceneArrangementInfoOuterClass.java#L1957-L1967", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "VintageMarketInfo.Builder.addUnk3300ABHHACBDCFI", "code": "public Builder addUnk3300ABHHACBDCFI(int value) {\n ensureUnk3300ABHHACBDCFIIsMutable();\n unk3300ABHHACBDCFI_.addInt(value);\n onChanged();\n return this;\n }", "docstring": "/**\n * repeated uint32 Unk3300_ABHHACBDCFI = 3;\n * @param value The unk3300ABHHACBDCFI to add.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/VintageMarketInfoOuterClass.java#L2282-L2287", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "VintageMarketInfo.Builder.clearUnk3300KJDEFIBKBPE", "code": "public Builder clearUnk3300KJDEFIBKBPE() {\n \n unk3300KJDEFIBKBPE_ = false;\n onChanged();\n return this;\n }", "docstring": "/**\n * bool Unk3300_KJDEFIBKBPE = 5;\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/VintageMarketInfoOuterClass.java#L2415-L2420", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MaterialOuterClass.Material.getCount", "code": "@java.lang.Override\n public int getCount() {\n return count_;\n }", "docstring": "/**\n * uint32 count = 1;\n * @return The count.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/MaterialOuterClass.java#L143-L146", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PlayerDataNotify.Builder.putPropMap", "code": "public Builder putPropMap(\n int key,\n emu.grasscutter.net.proto.PropValueOuterClass.PropValue value) {\n \n if (value == null) { throw new java.lang.NullPointerException(); }\n internalGetMutablePropMap().getMutableMap()\n .put(key, value);\n return this;\n }", "docstring": "/**\n * map<uint32, .PropValue> propMap = 5;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/PlayerDataNotifyOuterClass.java#L1176-L1184", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PlayerLoginReq.Builder.setClientVerisonHash", "code": "public Builder setClientVerisonHash(\n java.lang.String value) {\n if (value == null) {\n throw new NullPointerException();\n }\n \n clientVerisonHash_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * string clientVerisonHash = 1765;\n * 
@param value The clientVerisonHash to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/PlayerLoginReqOuterClass.java#L4211-L4220", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AnchorPointDataOuterClass.AnchorPointData.getRot", "code": "@java.lang.Override\n public emu.grasscutter.net.proto.VectorOuterClass.Vector getRot() {\n return rot_ == null ? emu.grasscutter.net.proto.VectorOuterClass.Vector.getDefaultInstance() : rot_;\n }", "docstring": "/**\n * .Vector rot = 13;\n * @return The rot.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/AnchorPointDataOuterClass.java#L212-L215", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RegionInfo.Builder.setResVersionConfig", "code": "public Builder setResVersionConfig(\n emu.grasscutter.net.proto.ResVersionConfigOuterClass.ResVersionConfig.Builder builderForValue) {\n if (resVersionConfigBuilder_ == null) {\n resVersionConfig_ = builderForValue.build();\n onChanged();\n } else {\n resVersionConfigBuilder_.setMessage(builderForValue.build());\n }\n\n return this;\n }", "docstring": "/**\n * .ResVersionConfig res_version_config = 22;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/RegionInfoOuterClass.java#L3421-L3431", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StopServerInfo.Builder.getContentMsg", "code": "public java.lang.String getContentMsg() {\n java.lang.Object ref = contentMsg_;\n if (!(ref instanceof java.lang.String)) {\n com.google.protobuf.ByteString bs =\n (com.google.protobuf.ByteString) ref;\n java.lang.String s = bs.toStringUtf8();\n contentMsg_ = s;\n return s;\n } else {\n return (java.lang.String) ref;\n }\n }", "docstring": "/**\n * string contentMsg = 4;\n * @return The contentMsg.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/StopServerInfoOuterClass.java#L752-L763", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GetMailItemRsp.Builder.setItemList", "code": "public Builder setItemList(\n int index, emu.grasscutter.net.proto.EquipParamOuterClass.EquipParam.Builder builderForValue) {\n if (itemListBuilder_ == null) {\n ensureItemListIsMutable();\n itemList_.set(index, builderForValue.build());\n onChanged();\n } else {\n itemListBuilder_.setMessage(index, builderForValue.build());\n }\n return this;\n }", "docstring": "/**\n * repeated .EquipParam itemList = 9;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/GetMailItemRspOuterClass.java#L865-L875", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AvatarInfo.Builder.setExpeditionStateValue", "code": "public Builder setExpeditionStateValue(int value) {\n \n 
expeditionState_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * .AvatarExpeditionState expeditionState = 16;\n * @param value The enum numeric value on the wire for expeditionState to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/AvatarInfoOuterClass.java#L3897-L3902", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EvtDoSkillSuccNotify.Builder.setForwardTypeValue", "code": "public Builder setForwardTypeValue(int value) {\n \n forwardType_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * .ForwardType forwardType = 5;\n * @param value The enum numeric value on the wire for forwardType to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/EvtDoSkillSuccNotifyOuterClass.java#L781-L786", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EntityClientExtraInfo.Builder.getSkillAnchorPositionFieldBuilder", "code": "private com.google.protobuf.SingleFieldBuilderV3<\n emu.grasscutter.net.proto.VectorOuterClass.Vector, emu.grasscutter.net.proto.VectorOuterClass.Vector.Builder, emu.grasscutter.net.proto.VectorOuterClass.VectorOrBuilder> \n getSkillAnchorPositionFieldBuilder() {\n if (skillAnchorPositionBuilder_ == null) {\n skillAnchorPositionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<\n emu.grasscutter.net.proto.VectorOuterClass.Vector, emu.grasscutter.net.proto.VectorOuterClass.Vector.Builder, emu.grasscutter.net.proto.VectorOuterClass.VectorOrBuilder>(\n getSkillAnchorPosition(),\n getParentForChildren(),\n isClean());\n skillAnchorPosition_ = null;\n }\n return skillAnchorPositionBuilder_;\n }", "docstring": "/**\n * .Vector skillAnchorPosition = 1;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/EntityClientExtraInfoOuterClass.java#L572-L584", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SceneAvatarInfo.Builder.getReliquaryListBuilderList", "code": "public java.util.List \n getReliquaryListBuilderList() {\n return getReliquaryListFieldBuilder().getBuilderList();\n }", "docstring": "/**\n * repeated .SceneReliquaryInfo reliquaryList = 9;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/SceneAvatarInfoOuterClass.java#L2753-L2756", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SceneAvatarInfo.Builder.clearCurVehicleInfo", "code": "public Builder clearCurVehicleInfo() {\n if (curVehicleInfoBuilder_ == null) {\n curVehicleInfo_ = null;\n onChanged();\n } else {\n curVehicleInfo_ = null;\n curVehicleInfoBuilder_ = null;\n }\n\n return this;\n }", "docstring": "/**\n * .CurVehicleInfo curVehicleInfo = 20;\n */", "url": 
"https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/SceneAvatarInfoOuterClass.java#L3622-L3632", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MarkMapReq.Builder.setMark", "code": "public Builder setMark(emu.grasscutter.net.proto.MapMarkPointOuterClass.MapMarkPoint value) {\n if (markBuilder_ == null) {\n if (value == null) {\n throw new NullPointerException();\n }\n mark_ = value;\n onChanged();\n } else {\n markBuilder_.setMessage(value);\n }\n\n return this;\n }", "docstring": "/**\n * .MapMarkPoint mark = 10;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/MarkMapReqOuterClass.java#L927-L939", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IrodoriChessEntranceInfo.Builder.addLhjhgbpjpkl", "code": "public Builder addLhjhgbpjpkl(\n int index, emu.grasscutter.net.proto.IrodoriChessMonsterInfoOuterClass.IrodoriChessMonsterInfo value) {\n if (lhjhgbpjpklBuilder_ == null) {\n if (value == null) {\n throw new NullPointerException();\n }\n ensureLhjhgbpjpklIsMutable();\n lhjhgbpjpkl_.add(index, value);\n onChanged();\n } else {\n lhjhgbpjpklBuilder_.addMessage(index, value);\n }\n return this;\n }", "docstring": "/**\n * repeated .IrodoriChessMonsterInfo lhjhgbpjpkl = 8;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/IrodoriChessEntranceInfoOuterClass.java#L680-L693", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ChapterStateNotify.Builder.hasNeedPlayerLevel", "code": "public boolean hasNeedPlayerLevel() {\n return needPlayerLevelBuilder_ != null || needPlayerLevel_ != null;\n }", "docstring": "/**\n * .ChapterStateNotify.NeedPlayerLevel needPlayerLevel = 2;\n * @return Whether the needPlayerLevel field is set.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/ChapterStateNotifyOuterClass.java#L1899-L1901", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MailData.Builder.clearMailId", "code": "public Builder clearMailId() {\n \n mailId_ = 0;\n onChanged();\n return this;\n }", "docstring": "/**\n * uint32 mailId = 1;\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/MailDataOuterClass.java#L1061-L1066", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BartenderInfoOuterClass.BartenderInfo.getUnlockTaskListCount", "code": "@java.lang.Override\n public int getUnlockTaskListCount() {\n return unlockTaskList_.size();\n }", "docstring": "/**\n * repeated .BartenderTaskInfo unlockTaskList = 15;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/BartenderInfoOuterClass.java#L299-L302", "sha": 
"bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AsterInfo.Builder.clearAsterMid", "code": "public Builder clearAsterMid() {\n if (asterMidBuilder_ == null) {\n asterMid_ = null;\n onChanged();\n } else {\n asterMid_ = null;\n asterMidBuilder_ = null;\n }\n\n return this;\n }", "docstring": "/**\n * .AsterMidDetailInfo asterMid = 4;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/AsterInfoOuterClass.java#L1350-L1360", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CustomGadgetTreeInfo.Builder.getNodeListBuilderList", "code": "public java.util.List \n getNodeListBuilderList() {\n return getNodeListFieldBuilder().getBuilderList();\n }", "docstring": "/**\n * repeated .CustomCommonNodeInfo nodeList = 1;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/CustomGadgetTreeInfoOuterClass.java#L739-L742", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TrackingIOInfo.Builder.setUnk3300NMDNLHLAKAMBytes", "code": "public Builder setUnk3300NMDNLHLAKAMBytes(\n com.google.protobuf.ByteString value) {\n if (value == null) {\n throw new NullPointerException();\n }\n checkByteStringIsUtf8(value);\n \n unk3300NMDNLHLAKAM_ = value;\n onChanged();\n return this;\n }", "docstring": "/**\n * string Unk3300_NMDNLHLAKAM = 11;\n * @param value The bytes for unk3300NMDNLHLAKAM to set.\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/TrackingIOInfoOuterClass.java#L1193-L1203", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WinterCampActivityDetailInfo.Builder.addUsedItemList", "code": "public Builder addUsedItemList(\n int index, emu.grasscutter.net.proto.ItemParamOuterClass.ItemParam.Builder builderForValue) {\n if (usedItemListBuilder_ == null) {\n ensureUsedItemListIsMutable();\n usedItemList_.add(index, builderForValue.build());\n onChanged();\n } else {\n usedItemListBuilder_.addMessage(index, builderForValue.build());\n }\n return this;\n }", "docstring": "/**\n * repeated .ItemParam used_item_list = 14;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/WinterCampActivityDetailInfoOuterClass.java#L1711-L1721", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WinterCampRecvItemData.Builder.addItemList", "code": "public Builder addItemList(\n int index, emu.grasscutter.net.proto.ItemParamOuterClass.ItemParam.Builder builderForValue) {\n if (itemListBuilder_ == null) {\n ensureItemListIsMutable();\n itemList_.add(index, builderForValue.build());\n onChanged();\n } else {\n itemListBuilder_.addMessage(index, builderForValue.build());\n }\n return this;\n }", "docstring": "/**\n * repeated .ItemParam itemList = 9;\n */", "url": 
"https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/WinterCampRecvItemDataOuterClass.java#L838-L848", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GeneralRewardInfo.Builder.clearItemParam", "code": "public Builder clearItemParam() {\n if (itemParamBuilder_ == null) {\n itemParam_ = null;\n onChanged();\n } else {\n itemParam_ = null;\n itemParamBuilder_ = null;\n }\n\n return this;\n }", "docstring": "/**\n * .ItemParam itemParam = 5;\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/GeneralRewardInfoOuterClass.java#L1069-L1079", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LanternProjectionInfo.Builder.clearUnk3300FAIAIJGCBJG", "code": "public Builder clearUnk3300FAIAIJGCBJG() {\n unk3300FAIAIJGCBJG_ = java.util.Collections.emptyList();\n bitField0_ = (bitField0_ & ~0x00000001);\n onChanged();\n return this;\n }", "docstring": "/**\n * repeated .ClientInputType Unk3300_FAIAIJGCBJG = 2;\n * @return This builder for chaining.\n */", "url": "https://github.com/AngelicPretty/Gshine-Server/blob/bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755/src/generated/main/java/emu/grasscutter/net/proto/LanternProjectionInfoOuterClass.java#L1022-L1027", "sha": "bd9801c5e1d4f0d3a0d6cade7cad5f97eb71d755"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ReflectUtils.makeAccessible", "code": "public static void makeAccessible(Field field) {\n if ((!Modifier.isPublic(field.getModifiers()) || !Modifier.isPublic(field.getDeclaringClass().getModifiers())\n || Modifier.isFinal(field.getModifiers())) && !field.isAccessible()) {\n field.setAccessible(true);\n }\n }", "docstring": "/**\n * 改变private/protected的成员变量为public,尽量不调用实际改动的语句,避免JDK的SecurityManager抱怨。\n */", "url": "https://github.com/oddfar/campus-example/blob/4f4b89f97e9d0eb17ae9ff86dcc9a928578aade6/campus-common/src/main/java/com/oddfar/campus/common/utils/reflect/ReflectUtils.java#L254-L259", "sha": "4f4b89f97e9d0eb17ae9ff86dcc9a928578aade6"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SysMenuServiceImpl.getChildPerms", "code": "public List getChildPerms(List list, int parentId) {\n List returnList = new ArrayList();\n for (Iterator iterator = list.iterator(); iterator.hasNext(); ) {\n SysMenuEntity t = (SysMenuEntity) iterator.next();\n // 一、根据传入的某个父节点ID,遍历该父节点的所有子节点\n if (t.getParentId() == parentId) {\n recursionFn(list, t);\n returnList.add(t);\n }\n }\n return returnList;\n }", "docstring": "/**\n * 根据父节点的ID获取所有子节点\n *\n * @param list 分类表\n * @param parentId 传入的父节点ID\n * @return String\n */", "url": "https://github.com/oddfar/campus-example/blob/4f4b89f97e9d0eb17ae9ff86dcc9a928578aade6/campus-framework/src/main/java/com/oddfar/campus/framework/service/impl/SysMenuServiceImpl.java#L371-L382", "sha": "4f4b89f97e9d0eb17ae9ff86dcc9a928578aade6"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Utility.readConfirmSelection", "code": "public static char readConfirmSelection() {\n System.out.print(\"确认是否预订(Y/N): \");\n char c;\n for (; ; ) {//无限循环\n //在这里,将接受到字符,转成了大写字母\n //y => Y n=>N\n String str = readKeyBoard(1, false).toUpperCase();\n c = str.charAt(0);\n if (c == 'Y' || c == 
'N') {\n break;\n } else {\n System.out.print(\"选择错误,请重新输入:\");\n }\n }\n return c;\n }", "docstring": "/**\n * 功能:读取键盘输入的确认选项,Y或N\n * 将小的功能,封装到一个方法中.\n * @return Y或N\n */", "url": "https://github.com/luoye6/Vue_BookManageSystem_backend/blob/59de5993f9559d96825e22cf1318b60edc30c815/src/main/java/com/book/backend/utils/Utility.java#L139-L154", "sha": "59de5993f9559d96825e22cf1318b60edc30c815"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Accumulators.min", "code": "public static BsonField min(final SFunction fieldName, final SFunction expression) {\n return min(fieldName.getFieldNameLine(),expression.getFieldNameLineOption());\n }", "docstring": "/**\n * 获取 $group 操作的字段名称,该字段名称表示应用于组内所有成员时给定表达式的最小值.\n *\n * @param fieldName {@link BsonField} 名称\n * @param expression 表达式\n * @return {@link BsonField}\n * @since mongodb.driver.manual reference/operator/aggregation/min/ $min\n */", "url": "https://github.com/anwena/MongoPlus/blob/26f982a11cc072310dbd0e02fb33275cf83c1280/mongo-plus-core/src/main/java/com/anwen/mongo/aggregate/pipeline/Accumulators.java#L841-L843", "sha": "26f982a11cc072310dbd0e02fb33275cf83c1280"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FillField.linear", "code": "public static LinearFillOutputField linear(final String field) {\n return new FillField(notNull(\"field\", field),\n new Document(\"method\", \"linear\"));\n }", "docstring": "/**\n * 返回一个使用 {@link WindowOutputFields#linearFill(String, Object) Linear} 方法的 {@link FillOutputField}.\n *
<p>\n * {@linkplain FillOptions#sortBy(Bson) Sorting} 是必需的。</p>
\n *\n * @param field 要填写的字段.\n * @return 请求的 {@link FillOutputField}.\n * @since mongodb.driver.manual core/document/#dot-notation Dot notation\n */", "url": "https://github.com/anwena/MongoPlus/blob/26f982a11cc072310dbd0e02fb33275cf83c1280/mongo-plus-core/src/main/java/com/anwen/mongo/aggregate/pipeline/FillField.java#L96-L99", "sha": "26f982a11cc072310dbd0e02fb33275cf83c1280"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Indexes.text", "code": "public static Bson text(final SFunction fieldName) {\n notNull(\"fieldName\", fieldName);\n return new BsonDocument(fieldName.getFieldNameLine(), new BsonString(\"text\"));\n }", "docstring": "/**\n * 为给定字段上的文本索引创建索引键。\n *\n * @param fieldName 要在其上创建文本索引的字段\n * @return 索引详述\n */", "url": "https://github.com/anwena/MongoPlus/blob/26f982a11cc072310dbd0e02fb33275cf83c1280/mongo-plus-core/src/main/java/com/anwen/mongo/model/Indexes.java#L246-L249", "sha": "26f982a11cc072310dbd0e02fb33275cf83c1280"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QueryColumn.likeRaw", "code": "public QueryCondition likeRaw(Object value, BooleanSupplier isEffective) {\n return likeRaw(value, isEffective.getAsBoolean());\n }", "docstring": "/**\n * {@code LIKE value}\n */", "url": "https://github.com/mybatis-flex/mybatis-flex/blob/0c4b0c22666f4c54275fa64b25a2780a7c72cfb5/mybatis-flex-core/src/main/java/com/mybatisflex/core/query/QueryColumn.java#L738-L740", "sha": "0c4b0c22666f4c54275fa64b25a2780a7c72cfb5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QueryMethods.distinct", "code": "public static DistinctQueryColumn distinct(QueryColumn... columns) {\n return new DistinctQueryColumn(columns);\n }", "docstring": "/**\n * 对指定列进行去重。\n */", "url": "https://github.com/mybatis-flex/mybatis-flex/blob/0c4b0c22666f4c54275fa64b25a2780a7c72cfb5/mybatis-flex-core/src/main/java/com/mybatisflex/core/query/QueryMethods.java#L2352-L2354", "sha": "0c4b0c22666f4c54275fa64b25a2780a7c72cfb5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QueryMethods.selectCountOne", "code": "public static QueryWrapper selectCountOne() {\n return select(count(new RawQueryColumn(\"1\")).as(\"temp_count_one\"));\n }", "docstring": "/**\n * SELECT COUNT(1) as temp_count_one FROM table\n */", "url": "https://github.com/mybatis-flex/mybatis-flex/blob/0c4b0c22666f4c54275fa64b25a2780a7c72cfb5/mybatis-flex-core/src/main/java/com/mybatisflex/core/query/QueryMethods.java#L2634-L2636", "sha": "0c4b0c22666f4c54275fa64b25a2780a7c72cfb5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QueryWrapper.ge", "code": "public QueryWrapper ge(LambdaGetter column, Object value) {\n and(QueryMethods.column(column).ge(value));\n return this;\n }", "docstring": "/**\n * 大于等于 {@code >=}\n *\n * @param column 列名, lambda 展示\n * @param value 值\n */", "url": "https://github.com/mybatis-flex/mybatis-flex/blob/0c4b0c22666f4c54275fa64b25a2780a7c72cfb5/mybatis-flex-core/src/main/java/com/mybatisflex/core/query/QueryWrapper.java#L1115-L1118", "sha": "0c4b0c22666f4c54275fa64b25a2780a7c72cfb5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Builder.setRenderGL20", "code": "public Builder setRenderGL20(Boolean renderGL20) {\n this.renderGL20 = renderGL20;\n return this;\n }", "docstring": "/**\n * 是否支持OpenGL2.0\n * @param renderGL20 null 表示都可以, true表示要支持,false表示不支持\n */", "url": 
"https://github.com/JonaNorman/HDRSample/blob/99278a14bee7b98310521629acacd79be192ea15/sample/src/main/java/com/norman/android/hdrsample/opengl/GLEnvConfigSimpleChooser.java#L93-L96", "sha": "99278a14bee7b98310521629acacd79be192ea15"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AllayNbtUtils.nbtToBase64", "code": "public static String nbtToBase64(NbtMap nbtMap) {\n try (ByteArrayOutputStream stream = new ByteArrayOutputStream(); NBTOutputStream writer = org.cloudburstmc.nbt.NbtUtils.createWriter(stream)) {\n writer.writeTag(nbtMap);\n return Base64.getEncoder().encodeToString(stream.toByteArray());\n } catch (Exception e) {\n // Handle exceptions accordingly\n log.error(\"Failed to convert NBT to Base64\", e);\n }\n return null;\n }", "docstring": "/**\n * Convert NBT to Base64.\n *\n * @param nbtMap the NBT map.\n *\n * @return the Base64 string.\n */", "url": "https://github.com/AllayMC/Allay/blob/f7fcf6d9183c5216e7321510a6cd2a5017d20ca9/api/src/main/java/org/allaymc/api/utils/AllayNbtUtils.java#L45-L54", "sha": "f7fcf6d9183c5216e7321510a6cd2a5017d20ca9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "JSONUtils.fromMap", "code": "public static Map fromMap(String json) {\n return GSON.fromJson(json, new TypeToken>() {\n }.getType());\n }", "docstring": "/**\n * Parse a JSON string to a map.\n *\n * @param json the JSON string.\n *\n * @return the map.\n */", "url": "https://github.com/AllayMC/Allay/blob/f7fcf6d9183c5216e7321510a6cd2a5017d20ca9/api/src/main/java/org/allaymc/api/utils/JSONUtils.java#L242-L245", "sha": "f7fcf6d9183c5216e7321510a6cd2a5017d20ca9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ParameterFormatter.countArgumentPlaceholders3", "code": "static int countArgumentPlaceholders3(final char[] messagePattern, final int length, final int[] indices) {\n int result = 0;\n boolean isEscaped = false;\n for (int i = 0; i < length - 1; i++) {\n final char curChar = messagePattern[i];\n if (curChar == ESCAPE_CHAR) {\n isEscaped = !isEscaped;\n } else if (curChar == DELIM_START) {\n if (!isEscaped && messagePattern[i + 1] == DELIM_STOP) {\n indices[result] = i;\n result++;\n i++;\n }\n isEscaped = false;\n } else {\n isEscaped = false;\n }\n }\n return result;\n }", "docstring": "/**\n * Counts the number of unescaped placeholders in the given messagePattern.\n *\n * @param messagePattern the message pattern to be analyzed.\n *\n * @return the number of unescaped placeholders.\n */", "url": "https://github.com/AllayMC/Allay/blob/f7fcf6d9183c5216e7321510a6cd2a5017d20ca9/api/src/main/java/org/allaymc/api/utils/ParameterFormatter.java#L120-L139", "sha": "f7fcf6d9183c5216e7321510a6cd2a5017d20ca9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ACTrie.initFailover", "code": "private void initFailover() {\n //第一层的fail指针指向root\n Queue queue = new LinkedList<>();\n Map children = root.getChildren();\n for (ACTrieNode node : children.values()) {\n node.setFailover(root);\n queue.offer(node);\n }\n //构建剩余层数节点的fail指针,利用层次遍历\n while (!queue.isEmpty()) {\n ACTrieNode parentNode = queue.poll();\n for (Map.Entry entry : parentNode.getChildren().entrySet()) {\n ACTrieNode childNode = entry.getValue();\n ACTrieNode failover = parentNode.getFailover();\n // 在树中找到以childNode为结尾的字符串的最长前缀匹配,failover指向了这个最长前缀匹配的父节点\n while (failover != null && (!failover.hasChild(entry.getKey()))) {\n failover = failover.getFailover();\n }\n //回溯到了root节点\n if (failover 
== null) {\n childNode.setFailover(root);\n } else {\n // 更新当前节点的回退指针\n childNode.setFailover(failover.childOf(entry.getKey()));\n }\n queue.offer(childNode);\n }\n }\n }", "docstring": "/**\n * 初始化节点中的回退指针\n */", "url": "https://github.com/zongzibinbin/MallChat/blob/f2563328d4a5311a2308079025da4e637437759b/mallchat-chat-server/src/main/java/com/abin/mallchat/common/common/algorithm/sensitiveWord/ac/ACTrie.java#L45-L73", "sha": "f2563328d4a5311a2308079025da4e637437759b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AnnotationConfigApplicationContext.injectProperties", "code": "void injectProperties(BeanDefinition def, Class clazz, Object bean) throws ReflectiveOperationException {\n // 在当前类查找Field和Method并注入:\n for (Field f : clazz.getDeclaredFields()) {\n tryInjectProperties(def, clazz, bean, f);\n }\n for (Method m : clazz.getDeclaredMethods()) {\n tryInjectProperties(def, clazz, bean, m);\n }\n // 在父类查找Field和Method并注入:\n Class superClazz = clazz.getSuperclass();\n if (superClazz != null) {\n injectProperties(def, superClazz, bean);\n }\n }", "docstring": "/**\n * 注入属性\n */", "url": "https://github.com/michaelliao/summer-framework/blob/087b7a98cbbb8413db48ec4774a87c6f6bc47ef4/step-by-step/bean-post-processor/src/main/java/com/itranswarp/summer/context/AnnotationConfigApplicationContext.java#L333-L346", "sha": "087b7a98cbbb8413db48ec4774a87c6f6bc47ef4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ApkUtils.install", "code": "public static boolean install(Context context, File apkFile) throws IOException {\n return isSupportSilentInstall() ? install(context, apkFile.getCanonicalPath()) : installNormal(context, apkFile.getCanonicalPath());\n }", "docstring": "/**\n * 自适应apk安装(如果设备有root权限就自动静默安装)\n *\n * @param context\n * @param apkFile apk文件\n * @return\n */", "url": "https://github.com/appspa/app-space-android/blob/83823d2bd073cf785cceeb4eabf6775e2e77f7f8/app-space-update/src/main/java/com/appspa/update/utils/ApkUtils.java#L85-L87", "sha": "83823d2bd073cf785cceeb4eabf6775e2e77f7f8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AbstractRegisterTypeStrategy.checkVerifyCode", "code": "public abstract void checkVerifyCode(String identity, String verifyCode);", "docstring": "/**\n * 校验验证码是否通过\n *\n * @param identity 用户账号,可能为空。一般邮箱情况下会为空,手机情况下不为空\n * @param verifyCode 邮箱策略时为邮箱验证码;手机策略时为手机短信验证码\n */", "url": "https://github.com/hncboy/ai-beehive/blob/a87a815b14e82bc68c9e682b2438a728105e21a7/beehive-web/src/main/java/com/hncboy/beehive/web/service/strategy/user/AbstractRegisterTypeStrategy.java#L72-L72", "sha": "a87a815b14e82bc68c9e682b2438a728105e21a7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SmsBanner.PrintBanner", "code": "public static void PrintBanner(String version) {\n System.out.println(BANNER +version);\n }", "docstring": "/** 初始化配置文件时打印banner*/", "url": "https://github.com/dromara/SMS4J/blob/298e7c48e5c50444a0d626c24e06ae2fe23102eb/sms4j-provider/src/main/java/org/dromara/sms4j/provider/config/SmsBanner.java#L14-L16", "sha": "298e7c48e5c50444a0d626c24e06ae2fe23102eb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TwitterMessage.getBCP47LanguageTag", "code": "public String getBCP47LanguageTag() {\n if (isSetLocale()) {\n return getLocale().toLanguageTag();\n } else {\n return null;\n }\n }", "docstring": "/**\n * Returns the IETF BCP 47 Language Tag of the locale. E.g. 
zh-CN\n */", "url": "https://github.com/twitter/the-algorithm/blob/72eda9a24f815f6d566818cbf8518138e29d83e9/src/java/com/twitter/search/common/relevance/entities/TwitterMessage.java#L616-L622", "sha": "72eda9a24f815f6d566818cbf8518138e29d83e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EarlybirdThriftDocumentUtil.replaceCreatedAtAndCreatedAtCSF", "code": "public static void replaceCreatedAtAndCreatedAtCSF(ThriftDocument document, int value) {\n removeField(document, EarlybirdFieldConstant.CREATED_AT_FIELD);\n removeField(document, EarlybirdFieldConstant.CREATED_AT_CSF_FIELD);\n\n addIntField(document, EarlybirdFieldConstant.CREATED_AT_FIELD, value);\n addIntField(document, EarlybirdFieldConstant.CREATED_AT_CSF_FIELD, value);\n }", "docstring": "/**\n * First remove the old timestamp if they exist.\n * Then add the created at and created at csf fields to the given thrift document.\n */", "url": "https://github.com/twitter/the-algorithm/blob/72eda9a24f815f6d566818cbf8518138e29d83e9/src/java/com/twitter/search/common/schema/earlybird/EarlybirdThriftDocumentUtil.java#L201-L207", "sha": "72eda9a24f815f6d566818cbf8518138e29d83e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LightweightLinearModel.createForSchemaBased", "code": "protected static LightweightLinearModel createForSchemaBased(\n String modelName,\n double bias,\n Map binaryFeaturesById,\n Map continuousFeaturesById,\n Map discretizedFeaturesById) {\n return new LightweightLinearModel(modelName, bias, true,\n null, null, null,\n binaryFeaturesById, continuousFeaturesById, discretizedFeaturesById);\n }", "docstring": "/**\n * Create model for schema-based features\n */", "url": "https://github.com/twitter/the-algorithm/blob/72eda9a24f815f6d566818cbf8518138e29d83e9/src/java/com/twitter/search/common/util/ml/prediction_engine/LightweightLinearModel.java#L113-L122", "sha": "72eda9a24f815f6d566818cbf8518138e29d83e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FacetAccumulator.add", "code": "public abstract int add(long termID, int scoreIncrement, int penaltyIncrement, int tweepCred);", "docstring": "/** Called to notify the accumulator that the given termID has occurred in a document\n * Returns the current count of the given termID.\n */", "url": "https://github.com/twitter/the-algorithm/blob/72eda9a24f815f6d566818cbf8518138e29d83e9/src/java/com/twitter/search/core/earlybird/facets/FacetAccumulator.java#L14-L14", "sha": "72eda9a24f815f6d566818cbf8518138e29d83e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IntBlockPoolPackedLongsReader.getPackedValueIndex", "code": "public int getPackedValueIndex() {\n return packedValueIndex;\n }", "docstring": "/**\n * A simple getter of {@link #packedValueIndex}.\n */", "url": "https://github.com/twitter/the-algorithm/blob/72eda9a24f815f6d566818cbf8518138e29d83e9/src/java/com/twitter/search/core/earlybird/index/inverted/IntBlockPoolPackedLongsReader.java#L204-L206", "sha": "72eda9a24f815f6d566818cbf8518138e29d83e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SkipListContainer.searchCeil", "code": "public int searchCeil(\n K key,\n int skipListHead,\n SkipListComparator comparator,\n @Nullable SkipListSearchFinger searchFinger) {\n assert comparator != null;\n\n // Perform regular search.\n final int foundPointer = search(key, skipListHead, comparator, searchFinger);\n\n // Return foundPointer if it is 
not the list head and the pointed value has key equal to the\n // given key; otherwise, return next pointer.\n if (foundPointer != skipListHead\n && comparator.compareKeyWithValue(key, getValue(foundPointer), INVALID_POSITION) == 0) {\n return foundPointer;\n } else {\n return getNextPointer(foundPointer);\n }\n }", "docstring": "/**\n * Ceil search on given {@param key}.\n *\n * @param key target key will be searched.\n * @param skipListHead index of the header tower of the skip list will be searched.\n * @param comparator comparator used for comparison when traversing through the skip list.\n * @param searchFinger {@link SkipListSearchFinger} to accelerate search speed.\n * @return index of the smallest value with key greater or equal to the given key.\n */", "url": "https://github.com/twitter/the-algorithm/blob/72eda9a24f815f6d566818cbf8518138e29d83e9/src/java/com/twitter/search/core/earlybird/index/inverted/SkipListContainer.java#L267-L285", "sha": "72eda9a24f815f6d566818cbf8518138e29d83e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SegmentManager.getSegmentMetadata", "code": "public List getSegmentMetadata() {\n List segmentMetadata = new ArrayList<>();\n for (SegmentInfo segment : getSegmentInfos(Filter.All, Order.OLD_TO_NEW)) {\n segmentMetadata.add(segment.getSegmentMetadata());\n }\n return segmentMetadata;\n }", "docstring": "/**\n * Gets metadata for segments for debugging purposes.\n */", "url": "https://github.com/twitter/the-algorithm/blob/72eda9a24f815f6d566818cbf8518138e29d83e9/src/java/com/twitter/search/earlybird/partition/SegmentManager.java#L712-L718", "sha": "72eda9a24f815f6d566818cbf8518138e29d83e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AntiGamingFilter.newMock", "code": "@VisibleForTesting\n public static AntiGamingFilter newMock(boolean alwaysValue) {\n return new AntiGamingFilter(alwaysValue) {\n @Override\n public void startSegment(EarlybirdIndexSegmentAtomicReader reader) {\n }\n };\n }", "docstring": "/**\n * Creates an AntiGamingFilter that either accepts or rejects tweets from all users.\n * This method should only be called in tests.\n *\n * @param alwaysValue Determines if tweets should always be accepted or rejected.\n * @return An AntiGamingFilter that either accepts or rejects tweets from all users.\n */", "url": "https://github.com/twitter/the-algorithm/blob/72eda9a24f815f6d566818cbf8518138e29d83e9/src/java/com/twitter/search/earlybird/search/AntiGamingFilter.java#L84-L91", "sha": "72eda9a24f815f6d566818cbf8518138e29d83e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TermCountMonitor.getFieldStats", "code": "private Map getFieldStats() {\n Iterable segmentInfos = segmentManager.getSegmentInfos(\n SegmentManager.Filter.Enabled, SegmentManager.Order.NEW_TO_OLD);\n Map rawCounts = new HashMap<>();\n\n ImmutableSchemaInterface schemaSnapshot =\n segmentManager.getEarlybirdIndexConfig().getSchema().getSchemaSnapshot();\n Set missingFieldsCandidates = schemaSnapshot\n .getFieldInfos()\n .stream()\n .filter(fieldInfo -> fieldInfo.getFieldType().indexOptions() != IndexOptions.NONE)\n .map(Schema.FieldInfo::getName)\n .collect(Collectors.toSet());\n int segmentCount = 0;\n for (SegmentInfo segmentInfo : segmentInfos) {\n segmentCount++;\n try {\n EarlybirdSingleSegmentSearcher searcher = segmentManager.getSearcher(\n segmentInfo.getTimeSliceID(), schemaSnapshot);\n if (searcher != null) {\n EarlybirdIndexSegmentAtomicReader reader = 
searcher.getTwitterIndexReader();\n for (Schema.FieldInfo fieldInfo : schemaSnapshot.getFieldInfos()) {\n if (fieldInfo.getFieldType().indexOptions() == IndexOptions.NONE) {\n continue;\n }\n\n String fieldName = fieldInfo.getName();\n RawFieldCounter count = rawCounts.get(fieldName);\n if (count == null) {\n count = new RawFieldCounter();\n rawCounts.put(fieldName, count);\n }\n Terms terms = reader.terms(fieldName);\n if (terms != null) {\n missingFieldsCandidates.remove(fieldName);\n count.numTerms.add(terms.size());\n long sumTotalTermFreq = terms.getSumTotalTermFreq();\n if (sumTotalTermFreq != -1) {\n count.numTokens.add(sumTotalTermFreq);\n }\n }\n }\n }\n } catch (Exception e) {\n LOG.error(\"Exception getting average term count per field: \" + segmentInfo, e);\n }\n }\n\n // Update missing fields stats.\n missingFieldsCandidates.forEach(\n field -> getOrCreateLongGauge(missingFields, field, missingFieldStatNameFunc).set(1));\n missingFields.keySet().stream()\n .filter(\n field -> !missingFieldsCandidates.contains(field))\n .forEach(\n field -> getOrCreateLongGauge(missingFields, field, missingFieldStatNameFunc).set(0));\n\n long totalTermCount = 0;\n long totalTokenCount = 0;\n if (segmentCount == 0) {\n LOG.error(\"No segments are found to calculate per-field term counts.\");\n } else {\n LOG.debug(\"TermCountMonitor.getPerFieldTermCount.segmentCount = {}\", segmentCount);\n LOG.debug(\" field: term count (average per segment)\");\n for (Map.Entry entry : rawCounts.entrySet()) {\n String field = entry.getKey();\n final long averageTermCount = entry.getValue().numTerms.longValue() / segmentCount;\n final long averageTokenCount = entry.getValue().numTokens.longValue() / segmentCount;\n totalTermCount += entry.getValue().numTerms.longValue();\n totalTokenCount += entry.getValue().numTokens.longValue();\n\n LOG.debug(\" '{} term': {}\", field, averageTermCount);\n LOG.debug(\" '{} token': {}\", field, averageTokenCount);\n\n entry.getValue().numTerms.setValue(averageTermCount);\n entry.getValue().numTokens.setValue(averageTokenCount);\n }\n }\n LOG.info(\"Total term count: {}\", totalTermCount);\n LOG.info(\"Total token count: {}\", totalTokenCount);\n this.termCountOnAllFields.set(totalTermCount);\n this.tokenCountOnAllFields.set(totalTokenCount);\n\n return rawCounts;\n }", "docstring": "/**\n * Loops through all segments, and for each field gets the average term/token count.\n * Based on that, returns a map from each field to its term/token count (average per segment).\n */", "url": "https://github.com/twitter/the-algorithm/blob/72eda9a24f815f6d566818cbf8518138e29d83e9/src/java/com/twitter/search/earlybird/util/TermCountMonitor.java#L238-L322", "sha": "72eda9a24f815f6d566818cbf8518138e29d83e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PlannerBenchmarkFactory.createFromFreemarkerXmlResource", "code": "public static @NonNull PlannerBenchmarkFactory createFromFreemarkerXmlResource(@NonNull String templateResource) {\n PlannerBenchmarkConfig benchmarkConfig = PlannerBenchmarkConfig.createFromFreemarkerXmlResource(templateResource);\n return new DefaultPlannerBenchmarkFactory(benchmarkConfig);\n }", "docstring": "/**\n * Reads a Freemarker template from the classpath that generates an XML benchmark configuration\n * and uses that {@link PlannerBenchmarkConfig} to build a {@link PlannerBenchmarkFactory}.\n * The generated XML root element must be {@code }.\n *\n * @param templateResource a classpath resource as defined by {@link 
ClassLoader#getResource(String)}\n * @see #createFromFreemarkerXmlResource(String)\n */", "url": "https://github.com/TimefoldAI/timefold-solver/blob/f67c507a421ee113dd2e76f825480aa058b14767/benchmark/src/main/java/ai/timefold/solver/benchmark/api/PlannerBenchmarkFactory.java#L157-L160", "sha": "f67c507a421ee113dd2e76f825480aa058b14767"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ProblemBenchmarkResult.registerProblemSizeStatistics", "code": "public void registerProblemSizeStatistics(ProblemSizeStatistics problemSizeStatistics) {\n if (entityCount == null) {\n entityCount = problemSizeStatistics.entityCount();\n } else if (entityCount.longValue() != problemSizeStatistics.entityCount()) {\n LOGGER.warn(\"The problemBenchmarkResult ({}) has different entityCount values ([{},{}]).\\n\"\n + \"This is normally impossible for 1 inputSolutionFile.\",\n getName(), entityCount, problemSizeStatistics.entityCount());\n // The entityCount is not unknown (null), but known to be ambiguous\n entityCount = -1L;\n }\n if (variableCount == null) {\n variableCount = problemSizeStatistics.variableCount();\n } else if (variableCount.longValue() != problemSizeStatistics.variableCount()) {\n LOGGER.warn(\"The problemBenchmarkResult ({}) has different variableCount values ([{},{}]).\\n\"\n + \"This is normally impossible for 1 inputSolutionFile.\",\n getName(), variableCount, problemSizeStatistics.variableCount());\n // The variableCount is not unknown (null), but known to be ambiguous\n variableCount = -1L;\n }\n if (maximumValueCount == null) {\n maximumValueCount = problemSizeStatistics.approximateValueCount();\n } else if (maximumValueCount.longValue() != problemSizeStatistics.approximateValueCount()) {\n LOGGER.warn(\"The problemBenchmarkResult ({}) has different approximateValueCount values ([{},{}]).\\n\"\n + \"This is normally impossible for 1 inputSolutionFile.\",\n getName(), maximumValueCount, problemSizeStatistics.approximateValueCount());\n // The approximateValueCount is not unknown (null), but known to be ambiguous\n maximumValueCount = -1L;\n }\n if (problemScale == null) {\n problemScale = problemSizeStatistics.approximateProblemScaleLogAsFixedPointLong();\n } else if (problemScale.longValue() != problemSizeStatistics.approximateProblemScaleLogAsFixedPointLong()) {\n LOGGER.warn(\"The problemBenchmarkResult ({}) has different problemScale values ([{},{}]).\\n\"\n + \"This is normally impossible for 1 inputSolutionFile.\",\n getName(), problemScale, problemSizeStatistics.approximateProblemScaleLogAsFixedPointLong());\n // The problemScale is not unknown (null), but known to be ambiguous\n problemScale = -1L;\n }\n }", "docstring": "/**\n * HACK to avoid loading the problem just to extract its problemScale.\n * Called multiple times, for every {@link SingleBenchmarkResult} of this {@link ProblemBenchmarkResult}.\n *\n * @param problemSizeStatistics never null\n */", "url": "https://github.com/TimefoldAI/timefold-solver/blob/f67c507a421ee113dd2e76f825480aa058b14767/benchmark/src/main/java/ai/timefold/solver/benchmark/impl/result/ProblemBenchmarkResult.java#L427-L464", "sha": "f67c507a421ee113dd2e76f825480aa058b14767"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ConstraintCollectors.toMap", "code": "public static @NonNull BiConstraintCollector>> toMap(\n @NonNull BiFunction keyMapper,\n @NonNull BiFunction valueMapper) {\n return toMap(keyMapper, valueMapper, (IntFunction>) LinkedHashSet::new);\n }", "docstring": "/**\n * 
As defined by {@link #toMap(Function, Function)}.\n */", "url": "https://github.com/TimefoldAI/timefold-solver/blob/f67c507a421ee113dd2e76f825480aa058b14767/core/src/main/java/ai/timefold/solver/core/api/score/stream/ConstraintCollectors.java#L1418-L1422", "sha": "f67c507a421ee113dd2e76f825480aa058b14767"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "JobServiceImpl.searchJobs", "code": "@Override\n public JobsResponse searchJobs(JobsRequest request) throws Exception {\n Map termQuery = request.getTermQuery();\n Map sort = request.getSortOrder();\n Map rangeConditions = request.getRangeConditions();\n\n SearchSourceBuilder builder = openSearchService.genSearchBuilder(termQuery, rangeConditions, sort, null);\n Long count = openSearchService.count(builder, jobsIndex + \"-*\");\n builder.from(request.getFrom()).size(request.getSize());\n\n List items = openSearchService.find(JobAnalysis.class, builder, jobsIndex + \"-*\");\n List jobInfos = items.stream().map(data -> JobInfo.from(data, redisService.get(String.format(\"%s:%s:%s\", data.getProjectName(), data.getFlowName(), data.getTaskName())))).collect(Collectors.toList());\n\n JobsResponse response = new JobsResponse();\n response.setJobInfos(jobInfos);\n response.setCount(count);\n return response;\n }", "docstring": "/**\n * Search jobs\n */", "url": "https://github.com/cubefs/compass/blob/9d12a43fc9b5e41961f60643cf01f71f5679c5db/task-portal/src/main/java/com/oppo/cloud/portal/service/impl/JobServiceImpl.java#L90-L107", "sha": "9d12a43fc9b5e41961f60643cf01f71f5679c5db"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SysUserController.register", "code": "@PostMapping(\"/register\")\n public Result register(@RequestBody UserParam user){\n String pwd = new String(Base64.decodeBase64(user.getPassword()), StandardCharsets.UTF_8);\n String md5 = Md5Utils.hash(pwd).toUpperCase();\n user.setPassword(md5);\n BaseUser baseUser = new BaseUser();\n BeanUtils.copyProperties(user, baseUser);\n iBaseUserService.addUser(baseUser);\n return Result.success();\n }", "docstring": "/**\n * 用户注册\n * @param user\n * @return\n */", "url": "https://github.com/pingapi/crabc-api/blob/bdc48ac634b72b849402fa367c1057d626d6f2e2/crabc-boot/crabc-core/src/main/java/cn/crabc/core/app/controller/SysUserController.java#L107-L116", "sha": "bdc48ac634b72b849402fa367c1057d626d6f2e2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "InterfaceChargingController.unlockAvailablePieces", "code": "@PostMapping(\"/unlockAvailablePieces\")\n public BaseResponse unlockAvailablePieces(@RequestBody LockChargingVo lockChargingVo){\n return interfaceChargingService.unlockAvailablePieces(lockChargingVo);\n }", "docstring": "/**\n * 解锁库存\n * @param lockChargingVo\n * @return\n */", "url": "https://github.com/YukeSeko/YukeSeko-Interface/blob/12f35f4828f2ed13edbc1ca004b2d71d427fcb49/api/src/main/java/com/wzy/api/controller/InterfaceChargingController.java#L26-L29", "sha": "12f35f4828f2ed13edbc1ca004b2d71d427fcb49"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PageUtils.defaultOrder", "code": "public static void defaultOrder(List orders, QueryWrapper queryWrapper, SFunction defaultColumnName) {\n if (CollUtil.isEmpty(orders)) {\n defaultOrderProcess(queryWrapper, defaultColumnName);\n return;\n }\n orders.forEach(orderBy -> {\n //默认时间倒序\n if (StringUtils.EMPTY.equals(orderBy.getColumnName())) {\n defaultOrderProcess(queryWrapper, 
defaultColumnName);\n } else {\n if (orderBy.isDesc()) {\n queryWrapper.orderByDesc(orderBy.getColumnName());\n } else {\n queryWrapper.orderByAsc(orderBy.getColumnName());\n }\n }\n });\n }", "docstring": "/**\n * 默认排序,可以指定默认排序字段\n *\n * @param orders 排序条件\n * @param queryWrapper 查询条件\n * @param defaultColumnName 指定默认排序字段列\n * @param T\n */", "url": "https://github.com/rule-engine/rule-engine-open/blob/817661279c5c706a79ac461601c1f9c80742ac9d/rule-engine-open-web/src/main/java/cn/ruleengine/web/util/PageUtils.java#L152-L169", "sha": "817661279c5c706a79ac461601c1f9c80742ac9d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Convert.str", "code": "public static String str(Object obj, Charset charset) {\n if (null == obj) {\n return null;\n }\n\n if (obj instanceof String) {\n return (String) obj;\n } else if (obj instanceof byte[]) {\n return str((byte[]) obj, charset);\n } else if (obj instanceof Byte[]) {\n byte[] bytes = ArrayUtils.toPrimitive((Byte[]) obj);\n return str(bytes, charset);\n } else if (obj instanceof ByteBuffer) {\n return str((ByteBuffer) obj, charset);\n }\n return obj.toString();\n }", "docstring": "/**\n * 将对象转为字符串
\n * 1、Byte数组和ByteBuffer会被转换为对应字符串的数组 2、对象数组会调用Arrays.toString方法\n *\n * @param obj 对象\n * @param charset 字符集\n * @return 字符串\n */", "url": "https://github.com/Ginyi3705/ginyi-spring-vue/blob/6c07298cd34c75cc803a61af22a0af2ce6e7cf01/ginyi-springboot/ginyi-common/ginyi-common-utils/src/main/java/ginyi/common/utils/text/Convert.java#L152-L168", "sha": "6c07298cd34c75cc803a61af22a0af2ce6e7cf01"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "KronotopInstance.getStatus", "code": "public KronotopInstanceStatus getStatus() {\n return status;\n }", "docstring": "/**\n * Retrieves the status of the Kronotop instance.\n *\n * @return the status of the Kronotop instance.\n */", "url": "https://github.com/kronotop/kronotop/blob/10b44c6dd4a39973beba45e0c2ccc73e88d21afa/kronotop/src/main/java/com/kronotop/instance/KronotopInstance.java#L322-L324", "sha": "10b44c6dd4a39973beba45e0c2ccc73e88d21afa"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RespResponse.writeInteger", "code": "@Override\n public void writeInteger(long value) {\n ctx.writeAndFlush(new IntegerRedisMessage(value));\n }", "docstring": "/**\n * Writes a long integer value as a Redis response message to the client.\n *\n * @param value the long integer value to be written\n * @throws NullPointerException if the value is null\n */", "url": "https://github.com/kronotop/kronotop/blob/10b44c6dd4a39973beba45e0c2ccc73e88d21afa/kronotop/src/main/java/com/kronotop/server/impl/RespResponse.java#L81-L84", "sha": "10b44c6dd4a39973beba45e0c2ccc73e88d21afa"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Watcher.watchKey", "code": "public Long watchKey(ChannelId channelId, String key) {\n AtomicLong version = new AtomicLong();\n watchedKeys.compute(key, (k, watchedKey) -> {\n if (watchedKey == null) {\n watchedKey = new WatchedKey();\n }\n version.set(watchedKey.getVersion());\n watchedKey.getChannels().add(channelId);\n return watchedKey;\n });\n return version.get();\n }", "docstring": "/**\n * Watches a key for changes and associates a given channelId with it.\n *\n * @param channelId The channelId to associate with the key\n * @param key The key to watch\n * @return The version of the watched key\n */", "url": "https://github.com/kronotop/kronotop/blob/10b44c6dd4a39973beba45e0c2ccc73e88d21afa/kronotop/src/main/java/com/kronotop/watcher/Watcher.java#L51-L62", "sha": "10b44c6dd4a39973beba45e0c2ccc73e88d21afa"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MigrateArgs.copy", "code": "public MigrateArgs copy() {\n this.copy = true;\n return this;\n }", "docstring": "/**\n * Do not remove the key from the local instance by setting {@code COPY}.\n *\n * @return {@code this} {@link MigrateArgs}.\n */", "url": "https://github.com/kronotop/kronotop/blob/10b44c6dd4a39973beba45e0c2ccc73e88d21afa/kronotop/src/test/java/com/kronotop/commandbuilder/redis/MigrateArgs.java#L51-L54", "sha": "10b44c6dd4a39973beba45e0c2ccc73e88d21afa"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Utility.readMenuSelection", "code": "public static char readMenuSelection() {\n char c;\n for (; ; ) {\n String str = readKeyBoard(1, false);//包含一个字符的字符串\n c = str.charAt(0);//将字符串转换成字符char类型\n if (c != '1' && c != '2' && \n c != '3' && c != '4' && c != '5') {\n System.out.print(\"选择错误,请重新输入:\");\n } else break;\n }\n return c;\n }", "docstring": "/**\n * 功能:读取键盘输入的一个菜单选项,值:1——5的范围\n * @return 1——5\n */", 
"url": "https://github.com/timerring/backend-tutorial/blob/75524f6ac554afe80f015163796026ed06e79aba/code/java-tutorial/chapter22/QQServer/src/com/hspedu/utils/Utility.java#L24-L35", "sha": "75524f6ac554afe80f015163796026ed06e79aba"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Utility.readMenuSelection", "code": "public static char readMenuSelection() {\n char c;\n for (; ; ) {\n String str = readKeyBoard(1, false);//包含一个字符的字符串\n c = str.charAt(0);//将字符串转换成字符char类型\n if (c != '1' && c != '2' && \n c != '3' && c != '4' && c != '5') {\n System.out.print(\"选择错误,请重新输入:\");\n } else break;\n }\n return c;\n }", "docstring": "/**\n * 功能:读取键盘输入的一个菜单选项,值:1——5的范围\n * @return 1——5\n */", "url": "https://github.com/timerring/backend-tutorial/blob/75524f6ac554afe80f015163796026ed06e79aba/code/java-tutorial/chapter26/mhl/src/com/hspedu/mhl/utils/Utility.java#L23-L34", "sha": "75524f6ac554afe80f015163796026ed06e79aba"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AuditChangeFactory.getDeleteChangeDescription", "code": "private String getDeleteChangeDescription(AuditableEntity p, String entityClass, StringBuilder change) {\n debug(\"[getDeleteChangeDescription] entityClass: {}\", entityClass);\n change.append(\"Deleted \").append(entityClass).append(\" \").append(p.toAuditString());\n return change.toString();\n }", "docstring": "/**\n * Creates change log for object deleted.\n */", "url": "https://github.com/openkoda/openkoda/blob/d86335959f5fb1da2fa58a9a5c7f02065d322975/openkoda/src/main/java/com/openkoda/core/audit/AuditChangeFactory.java#L90-L94", "sha": "d86335959f5fb1da2fa58a9a5c7f02065d322975"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Key.setName", "code": "public void setName(String name) {\n this.name = name;\n }", "docstring": "/**\n * \n */", "url": "https://github.com/libaibaia/cloudSec/blob/6bf12ac0d6428197d803efdcdb8e5bc18519dd9f/src/main/java/com/domain/Key.java#L301-L303", "sha": "6bf12ac0d6428197d803efdcdb8e5bc18519dd9f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Menu.getTitle", "code": "public String getTitle() {\n return title;\n }", "docstring": "/**\n * 标题\n */", "url": "https://github.com/libaibaia/cloudSec/blob/6bf12ac0d6428197d803efdcdb8e5bc18519dd9f/src/main/java/com/domain/Menu.java#L160-L162", "sha": "6bf12ac0d6428197d803efdcdb8e5bc18519dd9f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StringCase.toBigCamelCase", "code": "public static String toBigCamelCase(String str) {\n return toCamelCase(str, true);\n }", "docstring": "/**\n * 将下划线方式命名的字符串转换为大驼峰式。如果转换前的下划线大写方式命名的字符串为空,则返回空字符串。
\n * 例如:hello_world=》HelloWorld\n *\n * @param str 转换前的下划线大写方式命名的字符串\n * @return 转换后的驼峰式命名的字符串\n */", "url": "https://github.com/qaiu/netdisk-fast-download/blob/0637bcfd8ebe6c489726ddd500fdb0930ec7ec80/core/src/main/java/cn/qaiu/vx/core/util/StringCase.java#L73-L75", "sha": "0637bcfd8ebe6c489726ddd500fdb0930ec7ec80"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Command.isPragmaLine", "code": "public final boolean isPragmaLine() {\n\t\tif (!firstToken.isPragma())\n\t\t\treturn false;\n\t\t// starting from the second Token, check for pragmas and line breaks \n\t\tToken token = firstToken.getNext();\n\t\twhile (token != null && token.isPragma() && token.lineBreaks == 0) {\n\t\t\ttoken = token.getNext();\n\t\t}\n\t\treturn (token == null || token.isComment());\n\t}", "docstring": "/**\n\t * returns true if the Command only consists of one line with one or several pragmas (and potentially a line-end comment) \n\t */", "url": "https://github.com/SAP/abap-cleaner/blob/718bd300b9997ed43f6e82c7a993bc3797ad24ea/com.sap.adt.abapcleaner/src/com/sap/adt/abapcleaner/parser/Command.java#L2815-L2824", "sha": "718bd300b9997ed43f6e82c7a993bc3797ad24ea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PeriodicTask.Builder.setService", "code": "public PeriodicTask.Builder setService(Class gcmTaskService) {\n this.gcmTaskService = gcmTaskService.getName();\n return this;\n }", "docstring": "/**\n *
<p>Set whichever {@link com.google.android.gms.gcm.GcmTaskService} you implement to execute the logic for this task.</p>
\n *\n * @param gcmTaskService Endpoint against which you're scheduling this task.\n */", "url": "https://github.com/inotia00/mMicroG/blob/9067cd13a01d497f51bc5c896b7b33cc2f2032a5/play-services-gcm/src/main/java/com/google/android/gms/gcm/PeriodicTask.java#L202-L205", "sha": "9067cd13a01d497f51bc5c896b7b33cc2f2032a5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WifiScanner.isScanning", "code": "@RequiresPermission(android.Manifest.permission.LOCATION_HARDWARE)\n public boolean isScanning() {\n throw new UnsupportedOperationException();\n }", "docstring": "/**\n * Check whether the Wi-Fi subsystem has started a scan and is waiting for scan results.\n *\n * @return true if a scan initiated via\n * {@link WifiScanner#startScan(ScanSettings, ScanListener)} or\n * {@link WifiManager#startScan()} is in progress.\n * false if there is currently no scanning initiated by {@link WifiScanner} or\n * {@link WifiManager}, but it's still possible the wifi radio is scanning for\n * another reason.\n * @hide\n */", "url": "https://github.com/inotia00/mMicroG/blob/9067cd13a01d497f51bc5c896b7b33cc2f2032a5/play-services-location/system-api/src/main/java/android/net/wifi/WifiScanner.java#L1171-L1174", "sha": "9067cd13a01d497f51bc5c896b7b33cc2f2032a5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TileOverlayOptions.isVisible", "code": "public boolean isVisible() {\n return visible;\n }", "docstring": "/**\n * Gets the visibility setting for this {@link TileOverlayOptions} object.\n *\n * @return {@code true} if the tile overlay is to be visible; {@code false} if it is not.\n */", "url": "https://github.com/inotia00/mMicroG/blob/9067cd13a01d497f51bc5c896b7b33cc2f2032a5/play-services-maps/src/main/java/com/google/android/gms/maps/model/TileOverlayOptions.java#L97-L99", "sha": "9067cd13a01d497f51bc5c896b7b33cc2f2032a5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "initializePlayers", "code": "const initializePlayers = () => {\n new window.YT.Player('player1', {\n events: {\n onStateChange: (event: any) => {\n if (event.data === window.YT.PlayerState.PLAYING) {\n const player2Frame = document.getElementById(\n 'player2'\n ) as HTMLIFrameElement;\n player2Frame?.contentWindow?.postMessage(\n '{\"event\":\"command\",\"func\":\"pauseVideo\",\"args\":\"\"}',\n '*'\n );\n }\n },\n },\n });\n\n new window.YT.Player('player2', {\n events: {\n onStateChange: (event: any) => {\n if (event.data === window.YT.PlayerState.PLAYING) {\n const player1Frame = document.getElementById(\n 'player1'\n ) as HTMLIFrameElement;\n player1Frame?.contentWindow?.postMessage(\n '{\"event\":\"command\",\"func\":\"pauseVideo\",\"args\":\"\"}',\n '*'\n );\n }\n },\n },\n });\n };", "docstring": "// Initialize players when API is ready", "url": "https://github.com/gluestack/gluestack-ui/blob/cb33f5f04453642724cdde392c7bfaca132fc214/example/storybook-nativewind/src/extra-components/nativewind/VadimStream.tsx#L24-L56", "sha": "cb33f5f04453642724cdde392c7bfaca132fc214"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "createAssertionError", "code": "function createAssertionError(\n options: ExtendedAssertionErrorConstructorOptions,\n): AssertionError {\n const error = new AssertionError(options);\n if (options.generatedMessage) {\n error.generatedMessage = true;\n }\n return error;\n}", "docstring": "// TODO(uki00a): This function is a workaround for setting the `generatedMessage` property 
flexibly.", "url": "https://github.com/supabase/edge-runtime/blob/89e59b0958cae6beb841038022e3afeee3e0f0bf/ext/node/polyfills/assert.ts#L48-L56", "sha": "89e59b0958cae6beb841038022e3afeee3e0f0bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RelateQueryWriter.parallel", "code": "parallel() {\n\t\treturn new RelateQueryWriter({\n\t\t\t...this.#state,\n\t\t\tparallel: true\n\t\t});\n\t}", "docstring": "/**\n\t * Run the query in parallel\n\t * \n\t * @returns The query writer\n\t */", "url": "https://github.com/StarlaneStudios/cirql/blob/cfecba42254fd2003a7cfb06c03306a041ca2b84/lib/writer/relate.ts#L195-L200", "sha": "cfecba42254fd2003a7cfb06c03306a041ca2b84"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "flatten", "code": "function flatten(obj: any, path: any = []) {\n // Iterate over the object's keys\n\n if (Array.isArray(obj)) {\n flat[`${path.join('.')}`] = obj;\n } else {\n for (const key of Object.keys(obj)) {\n // If the value is an object, recurse\n if (key === 'ids' && path.length > 0) {\n flat[`${path.join('.')}`] = obj[key];\n } else if (key === 'props') {\n flat[`${path.join('.')}.${key}`] = obj[key];\n } else if (typeof obj[key] === 'object') {\n flatten(obj[key], [...path, key]);\n } else {\n flat[`${path.join('.')}`] = obj[key];\n }\n }\n }\n }", "docstring": "// Recursive function to flatten the object", "url": "https://github.com/gluestack/gluestack-style/blob/3de2207b03d13894f7da9fa20c2ec4bf66760f2e/packages/react/src/styled.tsx#L62-L81", "sha": "3de2207b03d13894f7da9fa20c2ec4bf66760f2e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AttackModel.calculateDamage", "code": "public calculateDamage() {\n\t\treturn Math.max(\n\t\t\tthis.damage * this.damageMultiplier - this.damageReduction,\n\t\t\t0,\n\t\t)\n\t}", "docstring": "/** Calculates the damage for this attack */", "url": "https://github.com/hc-tcg/hc-tcg/blob/d19bc390c1f920a95a354b6e1573962e729577e9/common/models/attack-model.ts#L128-L133", "sha": "d19bc390c1f920a95a354b6e1573962e729577e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "parseUntil", "code": "function parseUntil(text: string, until: Array): [string, string] {\n\t// We take characters until we get to something that is probably a parser\n\tlet out = ''\n\tlet i = 0\n\n\tlet isEscaped = text[0] == '\\\\'\n\tlet nextChar: string | undefined = text[0]\n\n\twhile (true) {\n\t\tif (!isEscaped) {\n\t\t\tout += nextChar\n\t\t}\n\t\ti++\n\n\t\tif (i >= text.length) {\n\t\t\tbreak\n\t\t}\n\t\tnextChar = text.at(i)\n\t\tif (nextChar == undefined) {\n\t\t\tbreak\n\t\t}\n\n\t\tif (!isEscaped && until.includes(nextChar)) {\n\t\t\tbreak\n\t\t}\n\n\t\tisEscaped = nextChar === '\\\\'\n\t}\n\n\tif (text[text.length - 1] == '\\\\') {\n\t\tout += '\\\\'\n\t}\n\n\treturn [out, text.slice(i)]\n}", "docstring": "/* Parse the raw text that is part of a text mode or emoji node, handling escape sequences. 
*/", "url": "https://github.com/hc-tcg/hc-tcg/blob/d19bc390c1f920a95a354b6e1573962e729577e9/common/utils/formatting.ts#L404-L438", "sha": "d19bc390c1f920a95a354b6e1573962e729577e9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Reka.getNodeLocation", "code": "getNodeLocation(node: t.Type) {\n return this.observer.getNodeLocation(node);\n }", "docstring": "/**\n * Get the nearest parent Node and its relative path of a given AST node in the State.\n */", "url": "https://github.com/prevwong/reka.js/blob/ad337b15e172270f9bee4d7bf8bc99850eae83fd/packages/core/src/reka.ts#L357-L359", "sha": "ad337b15e172270f9bee4d7bf8bc99850eae83fd"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PrjManage.initOpeParam", "code": "public async initOpeParam(context: vscode.ExtensionContext): Promise {\n const os = process.platform;\n const extensionPath = hdlPath.toSlash(context.extensionPath);\n const workspacePath = this.getWorkspacePath();\n const propertyJsonPath = hdlPath.join(workspacePath, '.vscode', 'property.json');\n const propertySchemaPath = hdlPath.join(extensionPath, 'project', 'property-schema.json');\n const propertyInitPath = hdlPath.join(extensionPath, 'project', 'property-init.json');\n\n opeParam.setBasicInfo(os, \n extensionPath, \n workspacePath, \n propertyJsonPath, \n propertySchemaPath, \n propertyInitPath);\n \n opeParam.prjInfo.initContextPath(extensionPath, workspacePath);\n const refreshPrjConfig: RefreshPrjConfig = {mkdir: true};\n if (fs.existsSync(propertyJsonPath)) {\n const rawPrjInfo = hdlFile.readJSON(propertyJsonPath) as RawPrjInfo;\n opeParam.mergePrjInfo(rawPrjInfo);\n } else {\n refreshPrjConfig.mkdir = false;\n }\n\n // 创建用户目录\n hdlDir.mkdir(opeParam.dideHome);\n // 同步部分文件\n const cachePPySchema = hdlPath.join(opeParam.dideHome, 'property-schema.json');\n const propertySchema = opeParam.propertySchemaPath;\n if (fs.existsSync(cachePPySchema) && checkJson(cachePPySchema)) {\n hdlFile.copyFile(cachePPySchema, propertySchema);\n }\n\n return refreshPrjConfig;\n }", "docstring": "/**\n * init opeParam\n * @param context \n */", "url": "https://github.com/Digital-EDA/Digital-IDE/blob/2f90a87a1c9df236ee621e586d8e56ac0e7aaa57/src/manager/prj.ts#L82-L116", "sha": "2f90a87a1c9df236ee621e586d8e56ac0e7aaa57"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HdlAction.updateLinter", "code": "async updateLinter(path: string) {\n }", "docstring": "// 下一个版本丢弃,完全由后端承担这部分功能", "url": "https://github.com/Digital-EDA/Digital-IDE/blob/2f90a87a1c9df236ee621e586d8e56ac0e7aaa57/src/monitor/hdl.ts#L157-L158", "sha": "2f90a87a1c9df236ee621e586d8e56ac0e7aaa57"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IssuanceService.verifyKnownTicket", "code": "private async verifyKnownTicket(\n client: PoolClient,\n serializedPCD: SerializedPCD\n ): Promise {\n if (!serializedPCD.type) {\n throw new Error(\"input was not a serialized PCD\");\n }\n\n if (\n serializedPCD.type !== EdDSATicketPCDPackage.name &&\n serializedPCD.type !== ZKEdDSAEventTicketPCDPackage.name\n ) {\n throw new Error(\n `serialized PCD was wrong type, '${serializedPCD.type}' instead of '${EdDSATicketPCDPackage.name}' or '${ZKEdDSAEventTicketPCDPackage.name}'`\n );\n }\n\n let eventId: string;\n let productId: string;\n let publicKey: EdDSAPublicKey;\n\n if (serializedPCD.type === EdDSATicketPCDPackage.name) {\n const pcd = await EdDSATicketPCDPackage.deserialize(serializedPCD.pcd);\n\n if 
(!EdDSATicketPCDPackage.verify(pcd)) {\n return {\n success: true,\n value: { verified: false, message: \"Could not verify PCD.\" }\n };\n }\n\n eventId = pcd.claim.ticket.eventId;\n productId = pcd.claim.ticket.productId;\n publicKey = pcd.proof.eddsaPCD.claim.publicKey;\n } else {\n const pcd = await ZKEdDSAEventTicketPCDPackage.deserialize(\n serializedPCD.pcd\n );\n\n if (!ZKEdDSAEventTicketPCDPackage.verify(pcd)) {\n return {\n success: true,\n value: { verified: false, message: \"Could not verify PCD.\" }\n };\n }\n\n if (\n !(pcd.claim.partialTicket.eventId && pcd.claim.partialTicket.productId)\n ) {\n return {\n success: true,\n value: {\n verified: false,\n message: \"PCD does not reveal the correct fields.\"\n }\n };\n }\n\n // Watermarks can be up to four hours old\n if (Date.now() - parseInt(pcd.claim.watermark) > ONE_HOUR_MS * 4) {\n return {\n success: true,\n value: {\n verified: false,\n message: \"PCD watermark has expired.\"\n }\n };\n }\n\n eventId = pcd.claim.partialTicket.eventId;\n productId = pcd.claim.partialTicket.productId;\n publicKey = pcd.claim.signer;\n }\n\n const knownTicketType = await fetchKnownTicketByEventAndProductId(\n client,\n eventId,\n productId\n );\n\n // If we found a known ticket type, compare public keys\n if (\n knownTicketType &&\n isEqualEdDSAPublicKey(JSON.parse(knownTicketType.public_key), publicKey)\n ) {\n // We can say that the submitted ticket can be verified as belonging\n // to a known group\n return {\n success: true,\n value: {\n verified: true,\n publicKeyName: knownTicketType.known_public_key_name,\n group: knownTicketType.ticket_group,\n eventName: knownTicketType.event_name\n }\n };\n } else {\n return {\n success: true,\n value: {\n verified: false,\n message: \"Not a recognized ticket\"\n }\n };\n }\n }", "docstring": "/**\n * Verifies a ticket based on:\n * 1) verification of the PCD (that it is correctly formed, with a proof\n * matching the claim)\n * 2) whether the ticket matches the ticket types known to us, e.g. 
Zuzalu\n * or Zuconnect tickets\n *\n */", "url": "https://github.com/proofcarryingdata/zupass/blob/b4757b45a8c4d09fe3d7cea06545a750fa94aeea/apps/passport-server/src/services/issuanceService.ts#L954-L1059", "sha": "b4757b45a8c4d09fe3d7cea06545a750fa94aeea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PipelineAPISubservice.handlePollFeed", "code": "public async handlePollFeed(\n pipelineId: string,\n req: PollFeedRequest\n ): Promise {\n return traced(SERVICE_NAME, \"handlePollFeed\", async (span) => {\n logger(LOG_TAG, `handlePollFeed`, pipelineId, str(req));\n span?.setAttribute(\"feed_id\", req.feedId);\n const pipelineSlot =\n await this.pipelineSubservice.ensurePipelineSlotExists(pipelineId);\n tracePipeline(pipelineSlot.definition);\n const pipeline =\n await this.pipelineSubservice.ensurePipelineStarted(pipelineId);\n const feed = ensureFeedIssuanceCapability(pipeline, req.feedId);\n const feedResponse = await feed.issue(req);\n traceFlattenedObject(span, {\n result: {\n actionCount: feedResponse.actions.length,\n pcdCount: getPcdsFromActions(feedResponse.actions).length\n }\n });\n return feedResponse;\n });\n }", "docstring": "/**\n * Handles incoming requests that hit a Pipeline-specific feed for PCDs\n * for every single pipeline that has this capability that this server manages.\n */", "url": "https://github.com/proofcarryingdata/zupass/blob/b4757b45a8c4d09fe3d7cea06545a750fa94aeea/apps/passport-server/src/services/generic-issuance/subservices/PipelineAPISubservice.ts#L77-L99", "sha": "b4757b45a8c4d09fe3d7cea06545a750fa94aeea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ProtoPODGPC.findCircuit", "code": "public static findCircuit(\n familyName: string,\n circuitName: string,\n circuitFamily: ProtoPODGPCCircuitDesc[] = ProtoPODGPC.CIRCUIT_FAMILY\n ): ProtoPODGPCCircuitDesc | undefined {\n if (familyName && familyName !== PROTO_POD_GPC_FAMILY_NAME) {\n return undefined;\n }\n for (const circuitDesc of circuitFamily) {\n if (circuitName && circuitDesc.name === circuitName) {\n return circuitDesc;\n }\n }\n return undefined;\n }", "docstring": "/**\n * Finds the description of a circuit in this family by name.\n *\n * @param familyName the circuit family name\n * @param circuitName the name of the circuit\n * @param [circuitFamily=ProtoPODGPC.CIRCUIT_FAMILY] the circuit family to\n * search\n * @returns the circuit description, or undefined if the name is\n * unrecognized.\n */", "url": "https://github.com/proofcarryingdata/zupass/blob/b4757b45a8c4d09fe3d7cea06545a750fa94aeea/packages/lib/gpcircuits/src/proto-pod-gpc.ts#L604-L618", "sha": "b4757b45a8c4d09fe3d7cea06545a750fa94aeea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "replacer", "code": "function replacer(key: unknown, value: unknown): unknown {\n if (key === \"message\" && value instanceof Array) {\n return value.map((num: bigint) => num.toString(16));\n } else {\n return value;\n }\n}", "docstring": "/**\n * The replacer is used by `JSON.stringify` and, in this package, it is used within the\n * PCD's `serialize` function. 
It is called for each property on the JSON object and\n * converts the value of the property from a list of big integers to a list of hexadecimal\n * strings when the property's key name equals \"message\".\n * @param key The object property key.\n * @param value The object property value.\n * @returns The original value of the property or the converted one.\n */", "url": "https://github.com/proofcarryingdata/zupass/blob/b4757b45a8c4d09fe3d7cea06545a750fa94aeea/packages/pcd/eddsa-pcd/src/EDDSAPCDPackage.ts#L99-L105", "sha": "b4757b45a8c4d09fe3d7cea06545a750fa94aeea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DatabaseWriterWithTriggers._tableNameFromId", "code": "_tableNameFromId>(\n id: GenericId,\n ): TableName | null {\n for (const tableName of Object.keys(this.triggers.registered)) {\n if (\n this.innerDb.normalizeId(\n tableName as TableNamesInDataModel,\n id,\n )\n ) {\n return tableName as TableName;\n }\n }\n return null;\n }", "docstring": "// Helper methods.", "url": "https://github.com/get-convex/convex-helpers/blob/efd7053350a0fb6d8db1a0ded7df5c3de188d114/packages/convex-helpers/server/triggers.ts#L224-L238", "sha": "efd7053350a0fb6d8db1a0ded7df5c3de188d114"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DashboardDocsAwaitingReviewComponent.firstFourDocs", "code": "private get firstFourDocs() {\n return this.args.docs.slice(0, 4);\n }", "docstring": "/**\n * (Up to) the first four docs. The default documents shown.\n */", "url": "https://github.com/hashicorp-forge/hermes/blob/ced8f53c5fb431215abc31f4c09222c2e2981c6f/web/app/components/dashboard/docs-awaiting-review.ts#L26-L28", "sha": "ced8f53c5fb431215abc31f4c09222c2e2981c6f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DocumentSidebarComponent.allApprovers", "code": "protected get allApprovers() {\n return this.approverGroups.concat(this.approvers);\n }", "docstring": "/**\n * A computed property that returns all approvers and approverGroups.\n * Passed to the EditableField component to render the list of approvers and groups.\n * Recomputes when the approvers or approverGroups arrays change.\n */", "url": "https://github.com/hashicorp-forge/hermes/blob/ced8f53c5fb431215abc31f4c09222c2e2981c6f/web/app/components/document/sidebar.ts#L463-L465", "sha": "ced8f53c5fb431215abc31f4c09222c2e2981c6f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DocumentSidebarRelatedResourcesComponent.titleTooltipText", "code": "protected get titleTooltipText(): string {\n return `Documents and links that are relevant to this work.`;\n }", "docstring": "/**\n * The text passed to the TooltipIcon beside the title.\n */", "url": "https://github.com/hashicorp-forge/hermes/blob/ced8f53c5fb431215abc31f4c09222c2e2981c6f/web/app/components/document/sidebar/related-resources.ts#L117-L119", "sha": "ced8f53c5fb431215abc31f4c09222c2e2981c6f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ProjectIndexComponent.jiraIsEnabled", "code": "protected get jiraIsEnabled() {\n return !!this.configSvc.config.jira_url;\n }", "docstring": "/**\n * Whether Jira is configured for the project.\n * Determines whether to show the Jira-related UI.\n */", "url": "https://github.com/hashicorp-forge/hermes/blob/ced8f53c5fb431215abc31f4c09222c2e2981c6f/web/app/components/project/index.ts#L120-L122", "sha": "ced8f53c5fb431215abc31f4c09222c2e2981c6f"} +{"repo_name": "", "dataset": 
"github_2023", "owner": "", "lang": "", "func_name": "ProjectTileComponent.projectID", "code": "protected get projectID() {\n if (\"objectID\" in this.args.project) {\n return this.args.project.objectID;\n } else {\n return this.args.project.id;\n }\n }", "docstring": "/**\n * The project ID used as our LinkTo model.\n * If the project is an Algolia result, it has an `objectID`.\n * If the project is retrieved from the back-end, it has an `id`.\n */", "url": "https://github.com/hashicorp-forge/hermes/blob/ced8f53c5fb431215abc31f4c09222c2e2981c6f/web/app/components/project/tile.ts#L57-L63", "sha": "ced8f53c5fb431215abc31f4c09222c2e2981c6f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "loaderAdvancedConfig", "code": "async function loaderAdvancedConfig() {\n const testCrawlApp = createCrawl({\n log: false,\n baseUrl: 'http://localhost:8888'\n })\n\n const res = await testCrawlApp.crawlData({\n targets: ['/data', '/data'],\n proxy: { urls: ['http://localhost:7890'] },\n timeout: 10000,\n intervalTime: { max: 1000 },\n maxRetry: 0\n })\n\n return res.reduce((prev, item) => prev && item.isSuccess, true)\n}", "docstring": "// 2.2.Loader Advanced Config", "url": "https://github.com/coder-hxl/x-crawl/blob/5bea607d69c86da8c8f7b2e6b1dfa2078eb28952/test/automation/written/crawlData.test.ts#L74-L89", "sha": "5bea607d69c86da8c8f7b2e6b1dfa2078eb28952"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FFT._singleRealTransform2", "code": "_singleRealTransform2(outOff, off, step) {\n const out = this._out;\n const data = this._data;\n\n const evenR = data[off];\n const oddR = data[off + step];\n\n const leftR = evenR + oddR;\n const rightR = evenR - oddR;\n\n out[outOff] = leftR;\n out[outOff + 1] = 0;\n out[outOff + 2] = rightR;\n out[outOff + 3] = 0;\n }", "docstring": "// NOTE: Only called for len=4", "url": "https://github.com/IQEngine/IQEngine/blob/dd0fc48dee0cdb69fe8350b7429e59ae198aea25/client/src/utils/fft.ts#L445-L459", "sha": "dd0fc48dee0cdb69fe8350b7429e59ae198aea25"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Citation.listener", "code": "public async listener(t: number) {\n let isExecCommand = false;\n this.intervalID = window.setInterval(async () => {\n if (!Zotero.ZoteroCitation) {\n return this.clear();\n }\n if (!Zotero.Integration.currentSession || isExecCommand) {\n return;\n }\n const sessions = Zotero.Integration.sessions;\n const _sessions = this.sessions;\n for (const sessionID in sessions) {\n const session = sessions[sessionID];\n let _session: SessionData;\n if (!(session.agent as string).includes(\"Word\")) {\n continue;\n }\n // 初始化对象的session\n if (sessionID in _sessions) {\n _session = _sessions[sessionID];\n } else {\n _sessions[sessionID] = _session = { search: undefined, idData: {}, pending: true } as SessionData;\n await this.initSearch(sessionID);\n _session.pending = false;\n }\n // 其它线程search正在创建,则退出本次执行\n if (_session.pending == true && !_session.search) {\n return;\n }\n const citationsByItemID = session.citationsByItemID;\n // 分析排序\n const sortedItemIDs = this.getSortedItemIDs(session.citationsByIndex);\n this.updateCitations(sessionID, citationsByItemID, sortedItemIDs, session.styleClass);\n }\n }, t);\n window.addEventListener(\"close\", (event) => {\n event.preventDefault();\n try {\n this.clear();\n } catch {\n /* empty */\n }\n window.setTimeout(() => {\n window.close();\n });\n });\n const execCommand = Zotero.Integration.execCommand;\n const 
_sessions = this.sessions;\n // @ts-ignore ignore\n Zotero.Integration.execCommand = async function (agent, command, docId) {\n // eslint-disable-next-line prefer-rest-params\n console.log(...arguments);\n isExecCommand = true;\n // eslint-disable-next-line prefer-rest-params\n await execCommand(...arguments);\n isExecCommand = false;\n if (docId.endsWith(\"__doc__\")) {\n return;\n }\n const id = window.setInterval(async () => {\n const sessionID = Zotero.Integration?.currentSession?.sessionID;\n if (!sessionID) {\n console.log(\"sessionID is null, waiting...\");\n return;\n }\n window.clearInterval(id);\n console.log(\"clear interval\");\n let _session;\n while (!((_session ??= _sessions[sessionID]) && _session.search)) {\n await Zotero.Promise.delay(10);\n }\n console.log(_sessions);\n // 判断是否为插件修改过的名称,如果是则更新\n // 若为用户更改则不进行更新\n if ([sessionID, _session.lastName].indexOf(_session.search.name) != -1) {\n let targetName = docId\n try {\n targetName = PathUtils.split(docId).slice(-1)[0];\n } catch { }\n console.log(`${_session.search.name}->${targetName}`);\n // 修复Mac储存\n if (targetName && targetName.trim().length > 0) {\n _session.search.name = _session.lastName = targetName;\n await _session.search.saveTx({ skipSelect: true });\n }\n }\n }, 0);\n };\n }", "docstring": "/**\n * 监听session状态以生成搜索目录\n */", "url": "https://github.com/MuiseDestiny/zotero-citation/blob/10c5330216fb3cb5de04f8a93be21fadad8cbfb3/src/modules/citation.ts#L67-L155", "sha": "10c5330216fb3cb5de04f8a93be21fadad8cbfb3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_classicalRegister", "code": "const _classicalRegister = (\n startX: number,\n gateY: number,\n endX: number,\n wireY: number,\n): SVGElement => {\n const wirePadding = 1;\n // Draw vertical lines\n const vLine1: SVGElement = line(\n startX + wirePadding,\n gateY,\n startX + wirePadding,\n wireY - wirePadding,\n \"register-classical\",\n );\n const vLine2: SVGElement = line(\n startX - wirePadding,\n gateY,\n startX - wirePadding,\n wireY + wirePadding,\n \"register-classical\",\n );\n\n // Draw horizontal lines\n const hLine1: SVGElement = line(\n startX + wirePadding,\n wireY - wirePadding,\n endX,\n wireY - wirePadding,\n \"register-classical\",\n );\n const hLine2: SVGElement = line(\n startX - wirePadding,\n wireY + wirePadding,\n endX,\n wireY + wirePadding,\n \"register-classical\",\n );\n\n return group([vLine1, vLine2, hLine1, hLine2]);\n};", "docstring": "/**\n * Generates the SVG representation of a classical register.\n *\n * @param startX Start x coord.\n * @param gateY y coord of measurement gate.\n * @param endX End x coord.\n * @param wireY y coord of wire.\n *\n * @returns SVG representation of the given classical register.\n */", "url": "https://github.com/microsoft/qsharp/blob/902ec5f5f76c104051ac77bfd37c6784f660c489/npm/qsharp/ux/circuit-vis/formatters/registerFormatter.ts#L50-L90", "sha": "902ec5f5f76c104051ac77bfd37c6784f660c489"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FragmentState.maybeWidened", "code": "maybeWidened(t: T): T | PackageObjectToken {\n if (options.widening && t instanceof ObjectToken && this.widened.has(t))\n return this.a.canonicalizeToken(new PackageObjectToken(t.getPackageInfo(), t.kind));\n else\n return t;\n }", "docstring": "/**\n * If the provided token is an object token that has been widened, the corresponding package object token is returned.\n * Otherwise the provided token is returned as is.\n */", "url": 
"https://github.com/cs-au-dk/jelly/blob/1e61ea69662fa1181b7190e0a48a7044265176c8/src/analysis/fragmentstate.ts#L839-L844", "sha": "1e61ea69662fa1181b7190e0a48a7044265176c8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Ed25519Keypair.getSecretKey", "code": "getSecretKey(): string {\n return encodeRoochSercetKey(\n this.keypair.secretKey.slice(0, PRIVATE_KEY_SIZE),\n this.getKeyScheme(),\n )\n }", "docstring": "/**\n * The Bech32 secret key string for this Ed25519 keypair\n */", "url": "https://github.com/rooch-network/rooch/blob/a044edde8edbe0f7ea75c66fe47c74e25d6f6b7d/sdk/typescript/rooch-sdk/src/keypairs/ed25519/keypair.ts#L128-L133", "sha": "a044edde8edbe0f7ea75c66fe47c74e25d6f6b7d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Secp256k1PublicKey.flag", "code": "flag(): number {\n return SIGNATURE_SCHEME_TO_FLAG['Secp256k1']\n }", "docstring": "/**\n * Return the Rooch address associated with this Secp256k1 public key\n */", "url": "https://github.com/rooch-network/rooch/blob/a044edde8edbe0f7ea75c66fe47c74e25d6f6b7d/sdk/typescript/rooch-sdk/src/keypairs/secp256k1/publickey.ts#L74-L76", "sha": "a044edde8edbe0f7ea75c66fe47c74e25d6f6b7d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "deserializeValue", "code": "function deserializeValue(value: any, options: EJSONOptions = {}) {\n if (typeof value === 'number') {\n // TODO(NODE-4377): EJSON js number handling diverges from BSON\n const in32BitRange = value <= BSON_INT32_MAX && value >= BSON_INT32_MIN;\n const in64BitRange = value <= BSON_INT64_MAX && value >= BSON_INT64_MIN;\n\n if (options.relaxed || options.legacy) {\n return value;\n }\n\n if (Number.isInteger(value) && !Object.is(value, -0)) {\n // interpret as being of the smallest BSON integer type that can represent the number exactly\n if (in32BitRange) {\n return new Int32(value);\n }\n if (in64BitRange) {\n if (options.useBigInt64) {\n // eslint-disable-next-line no-restricted-globals -- This is allowed here as useBigInt64=true\n return BigInt(value);\n }\n return Long.fromNumber(value);\n }\n }\n\n // If the number is a non-integer or out of integer range, should interpret as BSON Double.\n return new Double(value);\n }\n\n // from here on out we're looking for bson types, so bail if its not an object\n if (value == null || typeof value !== 'object') return value;\n\n // upgrade deprecated undefined to null\n if (value.$undefined) return null;\n\n const keys = Object.keys(value).filter(\n k => k.startsWith('$') && value[k] != null\n ) as (keyof typeof keysToCodecs)[];\n for (let i = 0; i < keys.length; i++) {\n const c = keysToCodecs[keys[i]];\n if (c) return c.fromExtendedJSON(value, options);\n }\n\n if (value.$date != null) {\n const d = value.$date;\n const date = new Date();\n\n if (options.legacy) {\n if (typeof d === 'number') date.setTime(d);\n else if (typeof d === 'string') date.setTime(Date.parse(d));\n else if (typeof d === 'bigint') date.setTime(Number(d));\n else throw new BSONRuntimeError(`Unrecognized type for EJSON date: ${typeof d}`);\n } else {\n if (typeof d === 'string') date.setTime(Date.parse(d));\n else if (Long.isLong(d)) date.setTime(d.toNumber());\n else if (typeof d === 'number' && options.relaxed) date.setTime(d);\n else if (typeof d === 'bigint') date.setTime(Number(d));\n else throw new BSONRuntimeError(`Unrecognized type for EJSON date: ${typeof d}`);\n }\n return date;\n }\n\n if (value.$code != null) {\n const copy = 
Object.assign({}, value);\n if (value.$scope) {\n copy.$scope = deserializeValue(value.$scope);\n }\n\n return Code.fromExtendedJSON(value);\n }\n\n if (isDBRefLike(value) || value.$dbPointer) {\n const v = value.$ref ? value : value.$dbPointer;\n\n // we run into this in a \"degenerate EJSON\" case (with $id and $ref order flipped)\n // because of the order JSON.parse goes through the document\n if (v instanceof DBRef) return v;\n\n const dollarKeys = Object.keys(v).filter(k => k.startsWith('$'));\n let valid = true;\n dollarKeys.forEach(k => {\n if (['$ref', '$id', '$db'].indexOf(k) === -1) valid = false;\n });\n\n // only make DBRef if $ keys are all valid\n if (valid) return DBRef.fromExtendedJSON(v);\n }\n\n return value;\n}", "docstring": "// eslint-disable-next-line @typescript-eslint/no-explicit-any", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/PROJECTS/AI-PROJECTS/DALLE-3-IMAGE-GENERATOR/backend/node_modules/bson/src/extended_json.ts#L87-L175", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ClientSession.advanceOperationTime", "code": "advanceOperationTime(operationTime: Timestamp): void {\n if (this.operationTime == null) {\n this.operationTime = operationTime;\n return;\n }\n\n if (operationTime.greaterThan(this.operationTime)) {\n this.operationTime = operationTime;\n }\n }", "docstring": "/**\n * Advances the operationTime for a ClientSession.\n *\n * @param operationTime - the `BSON.Timestamp` of the operation type it is desired to advance to\n */", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/PROJECTS/AI-PROJECTS/DALLE-3-IMAGE-GENERATOR/backend/node_modules/mongodb/src/sessions.ts#L289-L298", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "List.last", "code": "last(): T | null {\n // If the list is empty, value will be the head's null\n return this.head.prev.value;\n }", "docstring": "/** Returns the last item in the list, does not remove */", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/PROJECTS/AI-PROJECTS/DALLE-3-IMAGE-GENERATOR/backend/node_modules/mongodb/src/utils.ts#L835-L838", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isFieldOnly", "code": "function isFieldOnly(node: any): boolean {\n return node && node.type === \"column_ref\";\n }", "docstring": "// Helper function to check if a node is a field-only reference (column_ref without literal)", "url": "https://github.com/openobserve/openobserve/blob/3bac86c507feca65726e1d301cab886f03194b10/web/src/composables/useLogs.ts#L4494-L4496", "sha": "3bac86c507feca65726e1d301cab886f03194b10"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "onDrop", "code": "function onDrop(event:any ,offSet:any = {x:0,y:0}) {\n if (\n pipelineObj.hasInputNode &&\n pipelineObj.draggedNode.io_type == \"input\"\n ) {\n $q.notify({\n message: \"Only 1 source node is allowed\",\n color: \"negative\",\n position: \"bottom\",\n timeout: 2000, \n });\n return;\n }\n\n const position = screenToFlowCoordinate({\n x: event.clientX + offSet.x,\n y: event.clientY + offSet.y,\n });\n\n const nodeId = getUUID();\n\n const 
newNode = {\n id: nodeId,\n type: pipelineObj.draggedNode.io_type || \"default\",\n io_type: pipelineObj.draggedNode.io_type || \"default\",\n position,\n data: { label: nodeId, node_type: pipelineObj.draggedNode.subtype },\n };\n\n /**\n * Align node position after drop, so it's centered to the mouse\n *\n * We can hook into events even in a callback, and we can remove the event listener after it's been called.\n */\n const { off } = onNodesInitialized(() => {\n updateNode(nodeId, (node) => ({\n position: {\n x: node.position.x - node.dimensions.width / 2,\n y: node.position.y - node.dimensions.height / 2,\n },\n }));\n\n off();\n });\n\n pipelineObj.currentSelectedNodeData = newNode;\n pipelineObj.dialog.name = newNode.data.node_type;\n pipelineObj.dialog.show = true;\n pipelineObj.isEditNode = false;\n\n }", "docstring": "/**\n * Handles the drop event.\n *\n * @param {DragEvent} event\n */", "url": "https://github.com/openobserve/openobserve/blob/3bac86c507feca65726e1d301cab886f03194b10/web/src/plugins/pipelines/useDnD.ts#L149-L199", "sha": "3bac86c507feca65726e1d301cab886f03194b10"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "parseConstraints", "code": "function parseConstraints(constraints?: EdgeConstraint[]): EdgeConstraint[] {\n if (!constraints) {\n return [];\n }\n\n return constraints.map((c) => {\n const constraintType = c.type;\n const { source, target } = c;\n\n return {\n ...c,\n source,\n target,\n type: constraintType,\n };\n });\n}", "docstring": "/**\n * Parse initially defined constraints.\n * Reverses backward directed edges.\n *\n * @param constraints constraints to parse\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/dara-components/js/graphs/visual-edge-encoder.tsx#L67-L83", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NavigateTo", "code": "const NavigateTo: ActionHandler = (ctx, actionImpl): void => {\n const isValidUrl = isValidHttpUrl(actionImpl.url);\n\n if (!isValidUrl) {\n throw new Error(`Invalid URL: ${actionImpl.url}`);\n }\n\n if (actionImpl.new_tab) {\n window.open(actionImpl.url, actionImpl.new_tab ? 
'_blank' : undefined);\n } else {\n ctx.history.push(actionImpl.url);\n }\n};", "docstring": "/**\n * Front-end handler for NavigateTo action.\n * Navigates to a specified URL.\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/dara-core/js/actions/navigate-to.tsx#L29-L41", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UpdateVariable", "code": "const UpdateVariable: ActionHandler = async (ctx, actionImpl) => {\n let varAtom;\n let eventName: 'PLAIN_VARIABLE_LOADED' | 'URL_VARIABLE_LOADED';\n\n // Make sure the variable is registered\n switch (actionImpl.variable.__typename) {\n case 'Variable':\n varAtom = getOrRegisterPlainVariable(actionImpl.variable, ctx.wsClient, ctx.taskCtx, ctx.extras);\n eventName = 'PLAIN_VARIABLE_LOADED';\n break;\n case 'UrlVariable':\n varAtom = getOrRegisterUrlVariable(actionImpl.variable);\n eventName = 'URL_VARIABLE_LOADED';\n break;\n case 'DataVariable':\n throw new Error('DataVariable is not supported in UpdateVariable action');\n }\n\n let newValue;\n\n if (actionImpl.value === INPUT) {\n newValue = ctx.input;\n } else if (actionImpl.value === TOGGLE) {\n // normally we'd use the updater form here, but we need to know what value we're\n // toggling to emit the correct event, and the updater must be pure\n const value = await ctx.snapshot.getLoadable(varAtom).toPromise();\n newValue = !value;\n } else {\n newValue = actionImpl.value;\n }\n\n ctx.set(varAtom, newValue);\n ctx.eventBus.publish(eventName, { variable: actionImpl.variable as any, value: newValue });\n};", "docstring": "/**\n * Front-end handler for UpdateVariable action.\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/dara-core/js/actions/update-variable.tsx#L18-L51", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "cleanSessionCache", "code": "function cleanSessionCache(sessionToken: string): void {\n for (const storage of [sessionStorage, localStorage]) {\n const keys = Object.keys(storage);\n\n keys.forEach((key) => {\n // Remove keys related to a different Dara session\n if (key.startsWith('dara-session') && !key.startsWith(`dara-session-${sessionToken}`)) {\n storage.removeItem(key);\n }\n });\n }\n}", "docstring": "/**\n * Clean up session storage cache.\n * Purges sessionStorage persisted values which are related to a different session than the current one.\n *\n * @param sessionToken current session token\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/dara-core/js/shared/utils/clean-session-cache.tsx#L7-L18", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Modal", "code": "function Modal(props: ModalProps): JSX.Element {\n const [mounted, setMounted] = useState(false);\n const [renderModal, setRenderModal] = useState(false);\n\n // Internal state is updated using the useEffect to delay it to the next tick. 
This allows for the components css\n // animations to work correctly\n useEffect(() => {\n setRenderModal(props.render);\n }, [props.render]);\n\n useEffect(() => {\n if (renderModal) {\n const keyHandler = (e: KeyboardEvent): void => {\n if (e.key === Key.ESCAPE && props.onAttemptClose) {\n props.onAttemptClose();\n }\n };\n document.addEventListener('keydown', keyHandler);\n return () => {\n document.removeEventListener('keydown', keyHandler);\n };\n }\n // eslint-disable-next-line react-hooks/exhaustive-deps\n }, [renderModal, props.onAttemptClose]);\n\n if (!props.render && !mounted) {\n return null;\n }\n\n const onTransitionEnd = (): void => {\n setMounted(props.render);\n if (!props.render && props.onClosed) {\n props.onClosed();\n }\n };\n\n const stopPropagation = (e: React.MouseEvent): void => {\n e.stopPropagation();\n };\n\n return ReactDOM.createPortal(\n \n \n {props.children}\n \n ,\n document.body\n );\n}", "docstring": "/**\n * A simple modal component, accepts children and a render property. It handles attaching the modal to the body of the\n * document and transitioning it in and out of view as required\n *\n * @param {ModalProps} props - the component props\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/ui-components/src/modal/modal.tsx#L117-L170", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EllipsisH", "code": "const EllipsisH = (props: IconProps): JSX.Element => {\n return ;\n};", "docstring": "/**\n * EllipsisH icon from FontAwesome\n *\n * @param {IconProps} props - the component props\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/ui-icons/src/ellipsis-h.tsx#L26-L28", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Dizzy", "code": "const Dizzy = (props: IconProps): JSX.Element => {\n return ;\n};", "docstring": "/**\n * Dizzy icon from FontAwesome\n *\n * @param {IconProps} props - the component props\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/ui-icons/src/dizzy.tsx#L26-L28", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HouseMedicalCircleExclamation", "code": "const HouseMedicalCircleExclamation = (props: IconProps): JSX.Element => {\n return ;\n};", "docstring": "/**\n * HouseMedicalCircleExclamation icon from FontAwesome\n *\n * @param {IconProps} props - the component props\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/ui-icons/src/house-medical-circle-exclamation.tsx#L26-L28", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RoadSpikes", "code": "const RoadSpikes = (props: IconProps): JSX.Element => {\n return ;\n};", "docstring": "/**\n * RoadSpikes icon from FontAwesome\n *\n * @param {IconProps} props - the component props\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/ui-icons/src/road-spikes.tsx#L26-L28", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PersonCircleMinus", "code": "const PersonCircleMinus = (props: IconProps): JSX.Element 
=> {\n return ;\n};", "docstring": "/**\n * PersonCircleMinus icon from FontAwesome\n *\n * @param {IconProps} props - the component props\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/ui-icons/src/person-circle-minus.tsx#L26-L28", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PlaneLock", "code": "const PlaneLock = (props: IconProps): JSX.Element => {\n return ;\n};", "docstring": "/**\n * PlaneLock icon from FontAwesome\n *\n * @param {IconProps} props - the component props\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/ui-icons/src/plane-lock.tsx#L26-L28", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "List", "code": "const List = (props: IconProps): JSX.Element => {\n return ;\n};", "docstring": "/**\n * List icon from FontAwesome\n *\n * @param {IconProps} props - the component props\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/ui-icons/src/list.tsx#L26-L28", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UserSecret", "code": "const UserSecret = (props: IconProps): JSX.Element => {\n return ;\n};", "docstring": "/**\n * UserSecret icon from FontAwesome\n *\n * @param {IconProps} props - the component props\n */", "url": "https://github.com/causalens/dara/blob/8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9/packages/ui-icons/src/user-secret.tsx#L26-L28", "sha": "8769b0ad0ff467a75ebaf5bb872a7d6d48a629f9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QuarkElement.update", "code": "update() {\n this.getOrInitRenderWatcher().update()\n }", "docstring": "// Reserve, may expand in the future", "url": "https://github.com/hellof2e/quark-core/blob/3128425ec427f16344ff236c5f74adcfc24de2c7/packages/core/src/main.ts#L533-L535", "sha": "3128425ec427f16344ff236c5f74adcfc24de2c7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Element.removeAttribute", "code": "removeAttribute(name: string) {\n if (this._$nodeAttributes) {\n delete this._$nodeAttributes[name]\n }\n const be = this._$backendElement\n if (be) {\n if (ENV.DEV) performanceMeasureStart('backend.removeAttribute')\n be.removeAttribute(name)\n if (ENV.DEV) performanceMeasureEnd()\n }\n if (this._$mutationObserverTarget) {\n MutationObserverTarget.callAttrObservers(this, {\n type: 'properties',\n target: this,\n nameType: 'attribute',\n attributeName: name,\n })\n }\n }", "docstring": "/** Remove an attribute */", "url": "https://github.com/wechat-miniprogram/glass-easel/blob/7e6976956d5fff398ad93c09b2074a59c01191b8/glass-easel/src/element.ts#L2499-L2517", "sha": "7e6976956d5fff398ad93c09b2074a59c01191b8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "shortenNumber", "code": "const shortenNumber = (number: number): string | number => {\n if (number > 0 && number < 1) return number.toString().replace(\"0.\", \".\");\n return number;\n };", "docstring": "// Removes leading zero before floating point if necessary", "url": "https://github.com/romgrk/kui.nvim/blob/b3b2f53d6678dce86acc91043b32eab6059ce0cf/src/colord/plugins/minify.ts#L60-L63", "sha": "b3b2f53d6678dce86acc91043b32eab6059ce0cf"} +{"repo_name": "", 
"dataset": "github_2023", "owner": "", "lang": "", "func_name": "Graphics.clear", "code": "public clear(): this\n {\n this._geometry.clear();\n this._lineStyle.reset();\n this._fillStyle.reset();\n\n this._boundsID++;\n this._matrix = null;\n this._holeMode = false;\n this.currentPath = null;\n\n return this;\n }", "docstring": "/**\n * Clears the graphics that were drawn to this Graphics object, and resets fill and line style settings.\n * @returns - This Graphics object. Good for chaining method calls\n */", "url": "https://github.com/romgrk/kui.nvim/blob/b3b2f53d6678dce86acc91043b32eab6059ce0cf/src/graphics/Graphics.ts#L811-L823", "sha": "b3b2f53d6678dce86acc91043b32eab6059ce0cf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Rectangle.ceil", "code": "ceil(resolution = 1, eps = 0.001): this\n {\n const x2 = Math.ceil((this.x + this.width - eps) * resolution) / resolution;\n const y2 = Math.ceil((this.y + this.height - eps) * resolution) / resolution;\n\n this.x = Math.floor((this.x + eps) * resolution) / resolution;\n this.y = Math.floor((this.y + eps) * resolution) / resolution;\n\n this.width = x2 - this.x;\n this.height = y2 - this.y;\n\n return this;\n }", "docstring": "/**\n * Enlarges rectangle that way its corners lie on grid\n * @param resolution - resolution\n * @param eps - precision\n * @returns Returns itself.\n */", "url": "https://github.com/romgrk/kui.nvim/blob/b3b2f53d6678dce86acc91043b32eab6059ce0cf/src/math/shapes/Rectangle.ts#L289-L301", "sha": "b3b2f53d6678dce86acc91043b32eab6059ce0cf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "hierarchyConv", "code": "function hierarchyConv(state) {\n const parent = {\n name: \"root\",\n children: [],\n };\n\n function addNodeToTree(key, value, parent) {\n if (Array.isArray(value) && value.length > 0) {\n const node = { name: key, children: [] };\n parent.children.push(node);\n value.forEach((item, index) => {\n addNodeToTree(`[${index}]`, item, node);\n });\n } else if (typeof value === \"object\" && Object.keys(value).length > 0) {\n const node = { name: key, children: [] };\n parent.children.push(node);\n Object.entries(value).forEach(([key, val]) => {\n addNodeToTree(key, val, node);\n });\n } else {\n if (typeof value === \"object\") {\n const node = { name: key, attributes: { value: \"empty\" } };\n parent.children.push(node);\n } else {\n const node = { name: key, attributes: { value } };\n parent.children.push(node);\n }\n }\n }\n\n addNodeToTree(\"state\", state, parent);\n return parent.children[0];\n}", "docstring": "/* \nhierarchyConv takes in the state to display in the visualization tree,\nand converts it to a format readable by D3. 
\n\nconst example = {\n name: 'State',\n children: [\n {\n name: 'member1',\n attributes: {\n value: 'org1',\n },\n },\n {\n name: 'member2',\n children: [\n name: 'member3',\n attributes: {\n value: 'org2'\n }\n ]\n }\n ]\n} \n */", "url": "https://github.com/oslabs-beta/Zukeeper/blob/b6cca3edfcc65951d40e93878f046b848c2bdbe7/src/client/algorithms/hierarchyConv.ts#L27-L59", "sha": "b6cca3edfcc65951d40e93878f046b848c2bdbe7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "parseQueryString", "code": "function parseQueryString(query) {\n const params = {};\n query.split('&').forEach(function (part) {\n // 使用正则表达式匹配键和值,直到遇到第一个等号为止\n const regex = /^(.*?)=(.*)/;\n const match = part.match(regex);\n if (match) {\n const key = decodeURIComponent(match[1]);\n const value = decodeURIComponent(match[2]);\n params[key] = value;\n }\n });\n return params;\n}", "docstring": "//字符串To对象", "url": "https://github.com/Hiram-Wong/ZyPlayer/blob/0829adc234aa344d07c518aa8873b75765eb87df/src/main/core/server/routes/v1/site/cms/adapter/drpy/drpy3.ts#L1921-L1934", "sha": "0829adc234aa344d07c518aa8873b75765eb87df"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Replicache.clientGroupID", "code": "get clientGroupID(): Promise {\n return this.#impl.clientGroupID;\n }", "docstring": "/**\n * The client group ID for this instance of Replicache. Instances of\n * Replicache will have the same client group ID if and only if they have\n * the same name, mutators, indexes, schema version, format version, and\n * browser profile.\n */", "url": "https://github.com/rocicorp/mono/blob/c81e8ff6903fe24165cee02cce7c79629cdc70e4/packages/replicache/src/replicache.ts#L281-L283", "sha": "c81e8ff6903fe24165cee02cce7c79629cdc70e4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "filterOutput", "code": "const filterOutput = (\n dataFromCSV: any,\n output: string | string[] | string[][]\n) => {\n if (output === '*') {\n return nanifyEmptyValues(dataFromCSV);\n }\n\n if (Array.isArray(output)) {\n /** Check if it's a multidimensional array. */\n if (Array.isArray(output[0])) {\n const result: any = {};\n\n output.forEach(outputField => {\n /** Check if there is no renaming request, then export as is */\n const outputTitle = outputField[1] || outputField[0];\n result[outputTitle] = fieldAccessor(outputField[0], dataFromCSV);\n });\n\n return result;\n }\n\n const outputTitle = output[1] || output[0];\n\n return {\n [outputTitle as string]: fieldAccessor(output[0], dataFromCSV),\n };\n }\n\n return {\n [output]: fieldAccessor(output, dataFromCSV),\n };\n};", "docstring": "/**\n * 1. If output is anything, then removes query data from csv record to escape duplicates.\n * 2. Otherwise checks if it's a miltidimensional array, then grabs multiple fields ().\n * 3. If not, then returns single field.\n * 4. 
In case if it's string, then\n */", "url": "https://github.com/Green-Software-Foundation/if/blob/a202ce71dc4d970b98e090d7714677734112be5c/src/if-run/builtins/csv-import/index.ts#L61-L93", "sha": "a202ce71dc4d970b98e090d7714677734112be5c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "computeNode", "code": "const computeNode = async (node: Node, params: ComputeParams): Promise => {\n const pipeline = node.pipeline || (params.pipeline as PhasedPipeline);\n const config = node.config || params.config;\n const defaults = node.defaults || params.defaults;\n const noFlags = !params.observe && !params.regroup && !params.compute;\n\n debugLogger.setExecutingPluginName();\n warnIfConfigProvided(node);\n\n if (node.children) {\n return traverse(node.children, {\n ...params,\n pipeline,\n defaults,\n config,\n });\n }\n\n let outputStorage = structuredClone(node.inputs) as PluginParams[];\n outputStorage = mergeDefaults(outputStorage, defaults);\n const pipelineCopy = structuredClone(pipeline) || {};\n\n /** Checks if pipeline is not an array or empty object. */\n if (\n Array.isArray(pipelineCopy) ||\n (typeof pipelineCopy === 'object' &&\n pipelineCopy !== null &&\n Object.keys(pipelineCopy).length === 0)\n ) {\n logger.warn(EMPTY_PIPELINE);\n }\n\n /**\n * If iteration is on observe pipeline, then executes observe plugins and sets the inputs value.\n */\n if ((noFlags || params.observe) && pipelineCopy.observe) {\n while (pipelineCopy.observe.length !== 0) {\n const pluginName = pipelineCopy.observe.shift() as string;\n console.debug(OBSERVING(pluginName));\n debugLogger.setExecutingPluginName(pluginName);\n\n const plugin = params.pluginStorage.get(pluginName);\n const nodeConfig = config && config[pluginName];\n\n outputStorage = await plugin.execute(outputStorage, nodeConfig);\n node.inputs = outputStorage;\n\n if (params.context.explainer) {\n addExplainData({\n pluginName,\n metadata: plugin.metadata,\n });\n }\n }\n }\n\n /**\n * If regroup is requested, execute regroup strategy, delete child's inputs, outputs and empty regroup array.\n */\n if ((noFlags || params.regroup) && pipelineCopy.regroup) {\n const originalOutputs = params.append ? node.outputs || [] : [];\n\n if (!isRegrouped(pipelineCopy.regroup, outputStorage, childNames)) {\n node.children = Regroup(\n outputStorage,\n originalOutputs,\n pipelineCopy.regroup\n );\n\n delete node.inputs;\n delete node.outputs;\n\n debugLogger.setExecutingPluginName();\n console.debug(REGROUPING);\n\n return traverse(node.children, {\n ...params,\n pipeline: {\n ...pipelineCopy,\n regroup: undefined,\n },\n defaults,\n config,\n });\n } else {\n console.debug(SKIPPING_REGROUP);\n }\n }\n\n console.debug('\\n');\n\n /**\n * If iteration is on compute plugin, then executes compute plugins and sets the outputs value.\n */\n if ((noFlags || params.compute) && pipelineCopy.compute) {\n const originalOutputs = params.append ? 
node.outputs || [] : [];\n\n while (pipelineCopy.compute.length !== 0) {\n const pluginName = pipelineCopy.compute.shift() as string;\n const plugin = params.pluginStorage.get(pluginName);\n const nodeConfig = config && config[pluginName];\n\n console.debug(COMPUTING_PIPELINE_FOR_NODE(pluginName));\n debugLogger.setExecutingPluginName(pluginName);\n\n outputStorage = await plugin.execute(outputStorage, nodeConfig);\n\n debugLogger.setExecutingPluginName();\n\n node.outputs = outputStorage;\n\n if (params.context.explainer) {\n addExplainData({\n pluginName,\n metadata: plugin.metadata,\n });\n }\n }\n\n if (params.append) {\n node.outputs = originalOutputs.concat(node.outputs || []);\n }\n }\n\n console.debug('\\n');\n};", "docstring": "/**\n * 1. If the node has it's own pipeline, defaults or config then use that,\n * otherwise use whatever has been passed down from further up the tree.\n * 2. If it's a grouping node, then first of all computes all it's children.\n * This is doing a depth first traversal.\n * 3. Otherwise merges the defaults into the inputs.\n * 4. Iterates over pipeline phases (observe, regroup, compute).\n * 5. Observe plugins are used to insert input values\n * (isolated execution can be achived by passing `--observe` flag to CLI command).\n * 6. Regroup plugin is used to group existing inputs by criteria\n * (isolated execution can be achived by passing `--regroup` flag to CLI command).\n * Since it creates new children for node, existing inputs and outputs are dropped and recursive traversal is called\n * for newbord child component.\n * 7. Compute plugins are used to do desired computations and appending the result to outputs\n * (isolated execution can be achived by passing `--compute` flag to CLI command).\n */", "url": "https://github.com/Green-Software-Foundation/if/blob/a202ce71dc4d970b98e090d7714677734112be5c/src/if-run/lib/compute.ts#L88-L213", "sha": "a202ce71dc4d970b98e090d7714677734112be5c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BitPositionRegistry.claim", "code": "public claim(): BitPosition {\n\t\tconst { recycled } = this;\n\n\t\t// Claim the next bit.\n\t\tconst claimed = (recycled.length > 0) ? 
recycled.pop() : (this.next++);\n\n\t\t// Update the field.\n\t\tthis.asField = BitField.or(this.asField, BitField.fromPosition(claimed as BitPosition));\n\t\treturn claimed as BitPosition;\n\t}", "docstring": "/**\n\t * Claims a bit from the registry.\n\t */", "url": "https://github.com/eth-p/obsidian-callout-manager/blob/d425fd450cd06bb776bd04826e5c97d7ffad5407/src/search/bitfield.ts#L125-L134", "sha": "d425fd450cd06bb776bd04826e5c97d7ffad5407"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getFileStats", "code": "async function getFileStats(filePath: string): Promise {\n const stats = await fs.stat(filePath);\n return {\n size: stats.size,\n created: stats.birthtime,\n modified: stats.mtime,\n accessed: stats.atime,\n isDirectory: stats.isDirectory(),\n isFile: stats.isFile(),\n permissions: stats.mode.toString(8).slice(-3),\n };\n}", "docstring": "// Tool implementations", "url": "https://github.com/danny-avila/LibreChat/blob/52a6de2aa756564ffe12114ebfce0e7f93ea125c/packages/mcp/src/examples/filesystem.ts#L195-L206", "sha": "52a6de2aa756564ffe12114ebfce0e7f93ea125c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BlockHeader.cliqueEpochTransitionSigners", "code": "cliqueEpochTransitionSigners(): EthjsAddress[] {\n\t\tthis._requireClique('cliqueEpochTransitionSigners')\n\t\tif (!this.cliqueIsEpochTransition()) {\n\t\t\tconst msg = this._errorMsg('Signers are only included in epoch transition blocks (clique)')\n\t\t\tthrow new Error(msg)\n\t\t}\n\n\t\tconst start = CLIQUE_EXTRA_VANITY\n\t\tconst end = this.extraData.length - CLIQUE_EXTRA_SEAL\n\t\tconst signerBytes = this.extraData.subarray(start, end)\n\n\t\tconst signerList: Uint8Array[] = []\n\t\tconst signerLength = 20\n\t\tfor (let start = 0; start <= signerBytes.length - signerLength; start += signerLength) {\n\t\t\tsignerList.push(signerBytes.subarray(start, start + signerLength))\n\t\t}\n\t\treturn signerList.map((buf) => new EthjsAddress(buf))\n\t}", "docstring": "/**\n\t * Returns a list of signers\n\t * (only clique PoA, throws otherwise)\n\t *\n\t * This function throws if not called on an epoch\n\t * transition block and should therefore be used\n\t * in conjunction with {@link BlockHeader.cliqueIsEpochTransition}\n\t */", "url": "https://github.com/evmts/tevm-monorepo/blob/5b069dac6d3daa99146ce12f749c407f4519d0e2/packages/block/src/header.ts#L823-L840", "sha": "5b069dac6d3daa99146ce12f749c407f4519d0e2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EmulationApi.emulateMessageToAccountEvent", "code": "async emulateMessageToAccountEvent(requestParameters: EmulateMessageToAccountEventRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise {\n const response = await this.emulateMessageToAccountEventRaw(requestParameters, initOverrides);\n return await response.value();\n }", "docstring": "/**\n * Emulate sending message to blockchain\n */", "url": "https://github.com/tonkeeper/tonkeeper-web/blob/c8b92adbbdf6124fac1d83d30ab0376e9a6aad76/packages/core/src/tonApiV2/apis/EmulationApi.ts#L243-L246", "sha": "c8b92adbbdf6124fac1d83d30ab0376e9a6aad76"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EmulationApi.emulateMessageToTrace", "code": "async emulateMessageToTrace(requestParameters: EmulateMessageToTraceRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise {\n const response = await this.emulateMessageToTraceRaw(requestParameters, 
initOverrides);\n return await response.value();\n }", "docstring": "/**\n * Emulate sending message to blockchain\n */", "url": "https://github.com/tonkeeper/tonkeeper-web/blob/c8b92adbbdf6124fac1d83d30ab0376e9a6aad76/packages/core/src/tonApiV2/apis/EmulationApi.ts#L327-L330", "sha": "c8b92adbbdf6124fac1d83d30ab0376e9a6aad76"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StakingApi.getAccountNominatorsPoolsRaw", "code": "async getAccountNominatorsPoolsRaw(requestParameters: GetAccountNominatorsPoolsRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise> {\n if (requestParameters['accountId'] == null) {\n throw new runtime.RequiredError(\n 'accountId',\n 'Required parameter \"accountId\" was null or undefined when calling getAccountNominatorsPools().'\n );\n }\n\n const queryParameters: any = {};\n\n const headerParameters: runtime.HTTPHeaders = {};\n\n const response = await this.request({\n path: `/v2/staking/nominator/{account_id}/pools`.replace(`{${\"account_id\"}}`, encodeURIComponent(String(requestParameters['accountId']))),\n method: 'GET',\n headers: headerParameters,\n query: queryParameters,\n }, initOverrides);\n\n return new runtime.JSONApiResponse(response, (jsonValue) => AccountStakingFromJSON(jsonValue));\n }", "docstring": "/**\n * All pools where account participates\n */", "url": "https://github.com/tonkeeper/tonkeeper-web/blob/c8b92adbbdf6124fac1d83d30ab0376e9a6aad76/packages/core/src/tonApiV2/apis/StakingApi.ts#L129-L149", "sha": "c8b92adbbdf6124fac1d83d30ab0376e9a6aad76"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StatsServiceService.sendQueryToStats", "code": "public static sendQueryToStats(\n chain?: Chain,\n requestBody?: {\n project_id: number;\n name?: string;\n query?: string;\n gpt_message?: string;\n /**\n * cyclic execution of requests\n */\n repeat_interval?: number;\n },\n ): CancelablePromise {\n return __request(OpenAPI, {\n method: 'POST',\n url: '/api/v1/services/stats/query',\n query: {\n 'chain': chain,\n },\n body: requestBody,\n mediaType: 'application/json',\n errors: {\n 400: `Something went wrong on client side`,\n 403: `Access token is missing or invalid`,\n 404: `The specified resource was not found`,\n 500: `Something went wrong on server side`,\n },\n });\n }", "docstring": "/**\n * Send query to stats service\n * @param chain chain\n * @param requestBody Data that is expected\n * @returns StatsQueryResult Query result\n * @throws ApiError\n */", "url": "https://github.com/tonkeeper/tonkeeper-web/blob/c8b92adbbdf6124fac1d83d30ab0376e9a6aad76/packages/core/src/tonConsoleApi/services/StatsServiceService.ts#L81-L109", "sha": "c8b92adbbdf6124fac1d83d30ab0376e9a6aad76"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ServerMediaQueryList.destroy", "code": "destroy() {\n this.deactivate();\n this._listeners = [];\n }", "docstring": "/**\n * Destroy the current list by deactivating the\n * listeners and clearing the internal list\n */", "url": "https://github.com/ngbracket/ngx-layout/blob/525147da88c960f36c959044084cae25efe55b2e/projects/libs/flex-layout/server/server-match-media.ts#L45-L48", "sha": "525147da88c960f36c959044084cae25efe55b2e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Link.withHeader", "code": "public withHeader(header: string) {\n return Link.header(this.path, header, this.embed, this.display);\n }", "docstring": "/** Convert a file link into a link to a 
specific header. */", "url": "https://github.com/Leonezz/obsidian-tasks-calendar-wrapper/blob/1242f358c95a7b90da74b325e4651a198d9cb1d8/dataview-util/markdown.ts#L128-L130", "sha": "1242f358c95a7b90da74b325e4651a198d9cb1d8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WitnessTester.expectConstraintCount", "code": "async expectConstraintCount(expected: number, exact?: boolean) {\n const count = await this.getConstraintCount();\n if (count < expected) {\n throw new AssertionError({\n message: 'Circuit is under-constrained',\n expected,\n actual: count,\n });\n }\n\n if (exact && count !== expected) {\n throw new AssertionError({\n message: 'Circuit is over-constrained',\n expected,\n actual: count,\n });\n }\n }", "docstring": "/** Asserts that the circuit has enough constraints.\n *\n * By default, this function checks if there **at least** `expected` many constraints in the circuit.\n * If `exact` option is set to `true`, it will also check if the number of constraints is exactly equal to\n * the `expected` amount.\n *\n * If first check fails, it means the circuit is under-constrained. If the second check fails, it means\n * the circuit is over-constrained.\n */", "url": "https://github.com/erhant/circomkit/blob/b1a8ba682adf6d866ade2d53e3636f13a1d7403f/src/testers/witnessTester.ts#L65-L82", "sha": "b1a8ba682adf6d866ade2d53e3636f13a1d7403f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TextInputEvents.onKeyDown", "code": "private static onKeyDown(characterLimit: number, event: KeyboardEvent) {\n const inputElement = event.target as HTMLElement;\n const textContent = inputElement.textContent;\n if (textContent && textContent.length >= characterLimit\n && !TextInputEvents.PERMITTED_KEYS.has(event.key) && !TextInputEvents.isKeyCombinationPermitted(event)) {\n event.preventDefault();\n }\n }", "docstring": "// prettier-ignore", "url": "https://github.com/OvidijusParsiunas/deep-chat/blob/80c7386ddb94ba0dacfbd256a2f865cab1628264/component/src/views/chat/input/textInput/textInputEvents.ts#L28-L35", "sha": "80c7386ddb94ba0dacfbd256a2f865cab1628264"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LMDBMultiMap.set", "code": "set(key: K, val: V): Promise {\n return execInWriteTx(this.store, tx => tx.setIndex(serializeKey(this.prefix, key), this.encoder.pack(val)));\n }", "docstring": "/**\n * Sets the value at the given key.\n * @param key - The key to set the value at\n * @param val - The value to set\n */", "url": "https://github.com/AztecProtocol/aztec-packages/blob/f418456a84b9387a9d7bf76026a60b7a0f747149/yarn-project/kv-store/src/lmdb-v2/map.ts#L128-L130", "sha": "f418456a84b9387a9d7bf76026a60b7a0f747149"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AztecLmdbStore.openMultiMap", "code": "openMultiMap(name: string): AztecMultiMap & AztecAsyncMultiMap {\n return new LmdbAztecMap(this.#multiMapData, name);\n }", "docstring": "/**\n * Creates a new AztecMultiMap in the store. 
A multi-map stores multiple values for a single key automatically.\n * @param name - Name of the map\n * @returns A new AztecMultiMap\n */", "url": "https://github.com/AztecProtocol/aztec-packages/blob/f418456a84b9387a9d7bf76026a60b7a0f747149/yarn-project/kv-store/src/lmdb/store.ts#L121-L123", "sha": "f418456a84b9387a9d7bf76026a60b7a0f747149"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "StandardIndexedTree.getLatestLeafPreimageCopy", "code": "public getLatestLeafPreimageCopy(index: bigint, includeUncommitted: boolean): IndexedTreeLeafPreimage | undefined {\n const preimage = !includeUncommitted\n ? this.getDbPreimage(index)\n : this.getCachedPreimage(index) ?? this.getDbPreimage(index);\n return preimage && this.leafPreimageFactory.clone(preimage);\n }", "docstring": "/**\n * Gets the latest LeafPreimage copy.\n * @param index - Index of the leaf of which to obtain the LeafPreimage copy.\n * @param includeUncommitted - If true, the uncommitted changes are included in the search.\n * @returns A copy of the leaf preimage at the given index or undefined if the leaf was not found.\n */", "url": "https://github.com/AztecProtocol/aztec-packages/blob/f418456a84b9387a9d7bf76026a60b7a0f747149/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts#L217-L222", "sha": "f418456a84b9387a9d7bf76026a60b7a0f747149"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ProvingOrchestrator.getBlock", "code": "public getBlock(index: number): L2Block {\n const block = this.provingState?.blocks[index]?.block;\n if (!block) {\n throw new Error(`Block at index ${index} not available`);\n }\n return block;\n }", "docstring": "/** Returns the block as built for a given index. */", "url": "https://github.com/AztecProtocol/aztec-packages/blob/f418456a84b9387a9d7bf76026a60b7a0f747149/yarn-project/prover-client/src/orchestrator/orchestrator.ts#L288-L294", "sha": "f418456a84b9387a9d7bf76026a60b7a0f747149"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ContractDataOracle.getFunctionArtifactByName", "code": "public async getFunctionArtifactByName(\n contractAddress: AztecAddress,\n functionName: string,\n ): Promise {\n const tree = await this.getTreeForAddress(contractAddress);\n return tree.getArtifact().functions.find(f => f.name === functionName);\n }", "docstring": "/**\n * Retrieves the artifact of a specified function within a given contract.\n * The function is identified by its name, which is unique within a contract.\n * Throws if the contract has not been added to the database.\n *\n * @param contractAddress - The AztecAddress representing the contract containing the function.\n * @param functionName - The name of the function.\n * @returns The corresponding function's artifact as an object\n */", "url": "https://github.com/AztecProtocol/aztec-packages/blob/f418456a84b9387a9d7bf76026a60b7a0f747149/yarn-project/pxe/src/contract_data_oracle/index.ts#L72-L78", "sha": "f418456a84b9387a9d7bf76026a60b7a0f747149"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ClientExecutionContext.loadFromExecutionCache", "code": "public override loadFromExecutionCache(hash: Fr): Promise {\n return Promise.resolve(this.executionCache.getPreimage(hash));\n }", "docstring": "/**\n * Gets values from the execution cache.\n * @param hash - Hash of the values.\n * @returns The values.\n */", "url": 
"https://github.com/AztecProtocol/aztec-packages/blob/f418456a84b9387a9d7bf76026a60b7a0f747149/yarn-project/simulator/src/client/client_execution_context.ts#L179-L181", "sha": "f418456a84b9387a9d7bf76026a60b7a0f747149"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CriticMarkupRanges.range_adjacent_to_cursor", "code": "range_adjacent_to_cursor(cursor: number, left: boolean, loose = false, include_edge = false) {\n\t\tconst ranges = left ? this.ranges.slice().reverse() : this.ranges;\n\t\tif (include_edge) {\n\t\t\treturn ranges.find(range =>\n\t\t\t\tleft ? ((loose ? range.from : range.to) < cursor) : (cursor < (loose ? range.to : range.from))\n\t\t\t);\n\t\t} else {\n\t\t\treturn ranges.find(range =>\n\t\t\t\tleft ? ((loose ? range.from : range.to) <= cursor) : (cursor <= (loose ? range.to : range.from))\n\t\t\t);\n\t\t}\n\t}", "docstring": "/**\n\t * Get the range that is (not directly) adjacent to the cursor in given direction\n\t * @param cursor - Cursor position in the document\n\t * @param left - Whether to look left or right of the cursor\n\t * @param loose - Whether to include ranges that are partially adjacent to the cursor\n\t * @param include_edge - Whether to include the edges of the range\n\t */", "url": "https://github.com/Fevol/obsidian-criticmarkup/blob/407222c6e981d7a2acda110f75fd1fcfd5113626/src/editor/base/ranges/grouped_range.ts#L43-L54", "sha": "407222c6e981d7a2acda110f75fd1fcfd5113626"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DownDAL.mSpeedEvent", "code": "static mSpeedEvent(list: IAriaDownProgress[]) {\n const downingStore = useDowningStore()\n const settingStore = useSettingStore()\n const DowningList = downingStore.ListDataRaw\n\n if (list == undefined) list = []\n const dellist: string[] = []\n let hasSpeed = 0\n for (let n = 0; n < DowningList.length; n++) {\n if (DowningList[n].Down.DownSpeedStr != '') {\n const gid = DowningList[n].Info.GID\n let isFind = false\n for (let m = 0; m < list.length; m++) {\n if (list[m].gid != gid) continue\n if (list[m].gid == gid && list[m].status == 'active') {\n isFind = true\n break\n }\n }\n if (!isFind) {\n if (DowningList[n].Down.DownState != '已暂停') DowningList[n].Down.DownState = '队列中'\n DowningList[n].Down.DownSpeed = 0\n DowningList[n].Down.DownSpeedStr = ''\n }\n }\n }\n const ariaRemote = !settingStore.AriaIsLocal\n\n const saveList: IStateDownFile[] = []\n for (let i = 0; i < list.length; i++) {\n try {\n const gid = list[i].gid\n const isComplete = list[i].status === 'complete'\n const isDowning = isComplete || list[i].status === 'active' || list[i].status === 'waiting'\n const isStop = list[i].status === 'paused' || list[i].status === 'removed'\n const isError = list[i].status === 'error'\n\n for (let j = 0; j < DowningList.length; j++) {\n if (DowningList[j].Info.ariaRemote != ariaRemote) continue\n if (DowningList[j].Info.GID == gid) {\n const downItem = DowningList[j]\n const down = downItem.Down\n const totalLength = parseInt(list[i].totalLength) || 0\n down.DownSize = parseInt(list[i].completedLength) || 0\n down.DownSpeed = parseInt(list[i].downloadSpeed) || 0\n down.DownSpeedStr = humanSize(down.DownSpeed) + '/s'\n down.DownProcess = Math.floor((down.DownSize * 100) / (totalLength + 1)) % 100\n\n down.IsCompleted = isComplete\n down.IsDowning = isDowning\n down.IsFailed = isError\n down.IsStop = isStop\n\n if (list[i].errorCode && list[i].errorCode != '0') {\n down.FailedCode = parseInt(list[i].errorCode) || 0\n 
down.FailedMessage = FormatAriaError(list[i].errorCode, list[i].errorMessage)\n }\n\n if (isComplete) {\n down.DownSize = downItem.Info.size\n down.DownSpeed = 0\n down.DownSpeedStr = ''\n down.DownProcess = 100\n down.FailedCode = 0\n down.FailedMessage = ''\n\n down.DownState = '校验中'\n const check = AriaHashFile(downItem)\n if (check.Check) {\n if (useSettingStore().downFinishAudio && !sound.playing()) {\n sound.play()\n }\n downingStore.mUpdateDownState({\n DownID: check.DownID,\n DownState: '已完成',\n IsFailed: false,\n IsDowning: true,\n IsStop: false,\n IsCompleted: true,\n FailedMessage: ''\n })\n } else {\n downingStore.mUpdateDownState({\n DownID: check.DownID,\n DownState: '已出错',\n IsFailed: true,\n IsDowning: false,\n IsStop: true,\n IsCompleted: false,\n FailedMessage: '移动文件失败,请重新下载'\n })\n }\n } else if (isStop) {\n down.DownState = '已暂停'\n down.DownSpeed = 0\n down.DownSpeedStr = ''\n down.FailedCode = 0\n down.FailedMessage = ''\n } else if (isStop || isError) {\n down.DownState = '已出错'\n down.DownSpeed = 0\n down.DownSpeedStr = ''\n down.AutoTry = Date.now()\n if (down.FailedMessage == '') down.FailedMessage = '下载失败'\n } else if (isDowning) {\n hasSpeed += down.DownSpeed\n let lasttime = ((totalLength - down.DownSize) / (down.DownSpeed + 1)) % 356400\n if (lasttime < 1) lasttime = 1\n down.DownState =\n down.DownProcess.toString() +\n '% ' +\n (lasttime / 3600).toFixed(0).padStart(2, '0') +\n ':' +\n ((lasttime % 3600) / 60).toFixed(0).padStart(2, '0') +\n ':' +\n (lasttime % 60).toFixed(0).padStart(2, '0')\n if (SaveTimeWait > 10) saveList.push(downItem)\n } else {\n //console.log('update', DowningList[j]);\n }\n if (isStop || isError) {\n dellist.push(gid)\n }\n downingStore.mRefreshListDataShow(true)\n break\n }\n }\n } catch {\n }\n }\n\n if (saveList.length > 0) DBDown.saveDownings(JSON.parse(JSON.stringify(saveList)))\n if (dellist.length > 0) AriaDeleteList(dellist).then()\n if (SaveTimeWait > 10) SaveTimeWait = 0\n else SaveTimeWait++\n useFootStore().mSaveDownTotalSpeedInfo(hasSpeed && humanSizeSpeed(hasSpeed) || '')\n }", "docstring": "/**\n * 速度事件方法\n */", "url": "https://github.com/gaozhangmin/aliyunpan/blob/6fc8b1f9c5fa2bccc84143d4bd68d2b2b4c0c757/src/down/DownDAL.ts#L343-L479", "sha": "6fc8b1f9c5fa2bccc84143d4bd68d2b2b4c0c757"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Index.size", "code": "size(): number {\n return this.#compiledIndex.size();\n }", "docstring": "/**\n * Returns the number of vectors currently indexed.\n * @return {number} The number of vectors currently indexed.\n */", "url": "https://github.com/unum-cloud/usearch/blob/306d6646b8f539cee6c3fa11879dd3bc0edfa31f/javascript/usearch.ts#L472-L474", "sha": "306d6646b8f539cee6c3fa11879dd3bc0edfa31f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DataBinding.parseExp", "code": "private parseExp(exp: string) {\n const res = { isOK: false, code: undefined }\n if (!exp) return res\n try {\n const ast = parser.parse(`var _=(${exp})`)\n res.isOK = true\n traverse.default(ast, {\n ReferencedIdentifier: (path) => {\n // 检索出引用标识符\n this._bindingVars.add(path.node.name)\n },\n SequenceExpression() {\n res.isOK = false\n }\n } as any)\n } catch (e) {\n res.isOK = false\n }\n return res\n }", "docstring": "// 提取变量", "url": "https://github.com/eleme/morjs/blob/c1fd96bf0e5dc1178d56a1aecb4e524f4a4751a3/packages/plugin-compiler-web/src/compiler/core/axml2/ast/data-binding/index.ts#L116-L135", "sha": 
"c1fd96bf0e5dc1178d56a1aecb4e524f4a4751a3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "View.watchTouchMove", "code": "watchTouchMove() {\n // 防止重复监听,导致多次触发事件\n if (this.isWatchTouchMove) return\n this.isWatchTouchMove = true\n\n const parent: Element = this.getScrollParent()\n const callback = (ratio) => {\n if (ratio >= 0.5 && this.lastTrigger !== 1) {\n this.lastTrigger = 1\n if (!this.hasAppeared) {\n this.dispatchEvent(new CustomEvent('firstappear'))\n this.hasAppeared = true\n }\n this.dispatchEvent(new CustomEvent('appear'))\n }\n if (ratio < 0.5 && this.lastTrigger === 1) {\n this.lastTrigger = 0\n this.dispatchEvent(new CustomEvent('disappear'))\n }\n }\n\n requestAnimationFrame(() => {\n // 为了确保元素渲染出来调用\n const ratio = getElementVisibleRatio(this)\n callback(ratio)\n })\n\n this.listener = throttle(\n () => {\n const ratio = getElementVisibleRatio(this)\n callback(ratio)\n },\n 66,\n { leading: true, trailing: true }\n )\n\n parent.addEventListener('scroll', this.listener)\n }", "docstring": "//是否已经显示过", "url": "https://github.com/eleme/morjs/blob/c1fd96bf0e5dc1178d56a1aecb4e524f4a4751a3/packages/runtime-web/src/components/src/views/view/index.ts#L77-L114", "sha": "c1fd96bf0e5dc1178d56a1aecb4e524f4a4751a3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Takin.runExtendedRunner", "code": "private async runExtendedRunner(options: RunnerOptions): Promise {\n const RunnerExtended = this.hooks.extendRunner.call(Runner, options)\n const runner = new RunnerExtended(\n options.config,\n options.userConfig,\n options.context\n )\n this.currentRunners.add(runner)\n await runner.run(options.command, options.plugins)\n return runner\n }", "docstring": "/**\n * 执行扩展 Runner 方法\n */", "url": "https://github.com/eleme/morjs/blob/c1fd96bf0e5dc1178d56a1aecb4e524f4a4751a3/packages/takin/src/takin.ts#L224-L234", "sha": "c1fd96bf0e5dc1178d56a1aecb4e524f4a4751a3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BondContract.viewAddressBondsInfo", "code": "async viewAddressBondsInfo(address: IAddress): Promise<{\n totalStakedAmount: BigNumber.Value;\n userStakedAmount: BigNumber.Value;\n livelinessScore: number;\n }> {\n const interaction = this.contract.methodsExplicit.getAddressBondsInfo([\n new AddressValue(address)\n ]);\n const query = interaction.buildQuery();\n const queryResponse = await this.networkProvider.queryContract(query);\n const endpointDefinition = interaction.getEndpoint();\n const { firstValue, returnCode } = new ResultsParser().parseQueryResponse(\n queryResponse,\n endpointDefinition\n );\n if (returnCode.isSuccess()) {\n const returnValue = firstValue?.valueOf();\n return {\n totalStakedAmount: returnValue.field0.valueOf(),\n userStakedAmount: returnValue.field1.valueOf(),\n livelinessScore: new BigNumber(returnValue.field2.valueOf())\n .div(100)\n .toNumber()\n };\n } else {\n throw new ErrContractQuery('viewAddressBondsInfo', returnCode.toString());\n }\n }", "docstring": "/**\n * Returns the address bonds info\n * @param address address to query\n *\n */", "url": "https://github.com/Itheum/sdk-mx-data-nft/blob/217bc6343c4cdcb40330894f9362a280b55c4907/src/bond.ts#L137-L164", "sha": "217bc6343c4cdcb40330894f9362a280b55c4907"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getTmpZombiePath", "code": "function getTmpZombiePath() {\n return process.env.MOON_ZOMBIE_DIR;\n}", "docstring": "/// Returns the /tmp/zombie-52234... 
path", "url": "https://github.com/moondance-labs/tanssi/blob/67bda97356c372ea9c19b1775b0901b5f8283622/test/suites/zombie_tanssi/test_zombie_tanssi.ts#L499-L501", "sha": "67bda97356c372ea9c19b1775b0901b5f8283622"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Field_custom.defaultValue", "code": "get defaultValue(): T {\n return this.config.defaultValue()\n }", "docstring": "// #region Changes", "url": "https://github.com/rvion/CushyStudio/blob/142941557b471d65a1819014059798ed9f6535b3/src/csuite/fields/custom/FieldCustom.tsx#L113-L115", "sha": "142941557b471d65a1819014059798ed9f6535b3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Field_matrix.setOwnSerial", "code": "protected setOwnSerial(next: Field_matrix_serial): void {\n this.assignNewSerial(next)\n\n const cells = this.serial.selected ?? this.config.default ?? []\n const selectedCells = new Set(cells.map(({ row, col }) => this.getCellkey(row, col)))\n\n // make sure every cell has the right value\n for (const [x, row] of this.config.rows.entries()) {\n for (const [y, col] of this.config.cols.entries()) {\n const cellKey = this.getCellkey(row, col)\n const value = selectedCells.has(cellKey)\n const prev = this.store.get(cellKey)\n if (prev == null) this.store.set(cellKey, { x, y, col, row, value })\n else prev.value = value\n }\n }\n\n this.patchSerial((draft) => void (draft.selected = this.activeCells))\n }", "docstring": "// #region Serial", "url": "https://github.com/rvion/CushyStudio/blob/142941557b471d65a1819014059798ed9f6535b3/src/csuite/fields/matrix/FieldMatrix.ts#L87-L105", "sha": "142941557b471d65a1819014059798ed9f6535b3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Field_optional.setOff", "code": "setOff(): void {\n this.setActive(false)\n }", "docstring": "/** set the value to false */", "url": "https://github.com/rvion/CushyStudio/blob/142941557b471d65a1819014059798ed9f6535b3/src/csuite/fields/optional/FieldOptional.tsx#L233-L235", "sha": "142941557b471d65a1819014059798ed9f6535b3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ComfySchemaL.pythonModuleByNodeNameInCushy", "code": "get pythonModuleByNodeNameInCushy(): Map { return this.parseObjectInfo.pythonModuleByNodeNameInCushy } // prettier-ignore", "docstring": "// forward to underlying parsedObjectInfo", "url": "https://github.com/rvion/CushyStudio/blob/142941557b471d65a1819014059798ed9f6535b3/src/models/ComfySchema.ts#L53-L53", "sha": "142941557b471d65a1819014059798ed9f6535b3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CushyScriptL.getExecutable_orExtract", "code": "getExecutable_orExtract(appID: CushyAppID): Maybe {\n if (this._EXECUTABLES) return this._EXECUTABLES.find((executable) => appID === executable.appID)\n this.evaluateAndUpdateAppsAndViews()\n return this._EXECUTABLES!.find((executable) => appID === executable.appID)\n }", "docstring": "/** more costly variation of getExecutable_orNull */", "url": "https://github.com/rvion/CushyStudio/blob/142941557b471d65a1819014059798ed9f6535b3/src/models/CushyScript.ts#L143-L147", "sha": "142941557b471d65a1819014059798ed9f6535b3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CushyLayoutManager.addCustomV2", "code": "addCustomV2(fn: FC, props: T): void {\n const uid = uniqueIDByMemoryRef(fn)\n const panel = registerCustomPanel(uid, fn)\n this.open('Custom', { uid: panel.uid, props })\n }", 
"docstring": "/**\n * @experimental\n * @unstable\n */", "url": "https://github.com/rvion/CushyStudio/blob/142941557b471d65a1819014059798ed9f6535b3/src/router/Layout.tsx#L924-L928", "sha": "142941557b471d65a1819014059798ed9f6535b3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "slugify", "code": "function slugify(str: string | undefined): string | undefined {\n\tif (!str) return;\n\treturn str\n\t\t.toString()\n\t\t.trim()\n\t\t.toLowerCase()\n\t\t.replace(/[^\\w ]+/g, '')\n\t\t.replace(/ +/g, '-');\n}", "docstring": "// Slugify a string for hyphens and underscores", "url": "https://github.com/directus-labs/agency-os/blob/78d36dddf74289719249053165cd879cf9fdbeea/utils/strings.ts#L28-L36", "sha": "78d36dddf74289719249053165cd879cf9fdbeea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ListEvent.list_music_update", "code": "async list_music_update(userName: string, musicInfos: LX.List.ListActionMusicUpdate, isRemote: boolean = false) {\n const userSpace = getUserSpace(userName)\n // const changedIds =\n await userSpace.listManage.listDataManage.listMusicUpdateInfo(musicInfos)\n // await checkUpdateList(changedIds)\n this.emit('list_music_update', userName, musicInfos, isRemote)\n listUpdated()\n }", "docstring": "/**\n * 批量更新歌曲信息\n * @param musicInfos 歌曲&列表信息\n * @param isRemote 是否属于远程操作\n */", "url": "https://github.com/lyswhut/lx-music-sync-server/blob/f483d5fcebfe883141b56fcf08b63d741bb0954b/src/modules/list/event.ts#L187-L194", "sha": "f483d5fcebfe883141b56fcf08b63d741bb0954b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NameRegistry.resolveName", "code": "resolveName(name: string): Address | undefined {\n return this.nameToReg.get(name)?.addr;\n }", "docstring": "/** Find wallet address for a given Daimo name, or undefined if not found. 
*/", "url": "https://github.com/daimo-eth/daimo/blob/a960ddbbc0cb486f21b8460d22cebefc6376aac9/packages/daimo-api/src/contract/nameRegistry.ts#L205-L207", "sha": "a960ddbbc0cb486f21b8460d22cebefc6376aac9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isCommonjs", "code": "const isCommonjs = (code: string) =>\n /\\b(?:require|module|exports)\\b/.test(\n code.replace(/\\/\\*(.|[\\r\\n])*?\\*\\//gm, '').replace(/\\/\\/.*/g, ''),\n );", "docstring": "// https://github.com/vite-plugin/vite-plugin-commonjs/blob/5e3294e78fabb037e12aab75433908fbee17192a/src/utils.ts#L9-L15", "url": "https://github.com/dai-shi/waku/blob/06577a046cf0cbed13f11a4f3251fe525a1ffa25/packages/waku/src/lib/plugins/vite-plugin-dev-commonjs.ts#L8-L11", "sha": "06577a046cf0cbed13f11a4f3251fe525a1ffa25"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ConfigurationService.getCacheConfiguration", "code": "public getCacheConfiguration(): CancelablePromise<{\n result: {\n clearCacheCronSchedule: string;\n };\n }> {\n return this.httpRequest.request({\n method: 'GET',\n url: '/configuration/cache',\n errors: {\n 400: `Bad Request`,\n 404: `Not Found`,\n 500: `Internal Server Error`,\n },\n });\n }", "docstring": "/**\n * Get cache configuration\n * Get cache configuration\n * @returns any Default Response\n * @throws ApiError\n */", "url": "https://github.com/thirdweb-dev/engine/blob/07d1d7d3b5c28c5977d2b7457bf6ba33c22692d6/sdk/src/services/ConfigurationService.ts#L554-L568", "sha": "07d1d7d3b5c28c5977d2b7457bf6ba33c22692d6"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getCurrentNonceState", "code": "async function getCurrentNonceState(\n walletAddress: Address,\n chainId: number,\n): Promise {\n const [onchainNonce, largestSentNonce] = await Promise.all([\n getLastUsedOnchainNonce(chainId, walletAddress),\n inspectNonce(chainId, walletAddress),\n ]);\n\n return {\n onchainNonce: onchainNonce,\n largestSentNonce: largestSentNonce,\n };\n}", "docstring": "// Get current nonce state", "url": "https://github.com/thirdweb-dev/engine/blob/07d1d7d3b5c28c5977d2b7457bf6ba33c22692d6/src/worker/tasks/nonce-health-check-worker.ts#L104-L117", "sha": "07d1d7d3b5c28c5977d2b7457bf6ba33c22692d6"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "readInfoBits", "code": "function readInfoBits(b: Bitmap) {\n const readBit = (x: number, y: number, out: number) => (out << 1) | (b.data[y][x] ? 
1 : 0);\n const size = b.height;\n // Version information\n let version1 = 0;\n for (let y = 5; y >= 0; y--)\n for (let x = size - 9; x >= size - 11; x--) version1 = readBit(x, y, version1);\n let version2 = 0;\n for (let x = 5; x >= 0; x--)\n for (let y = size - 9; y >= size - 11; y--) version2 = readBit(x, y, version2);\n // Format information\n let format1 = 0;\n for (let x = 0; x < 6; x++) format1 = readBit(x, 8, format1);\n format1 = readBit(7, 8, format1);\n format1 = readBit(8, 8, format1);\n format1 = readBit(8, 7, format1);\n for (let y = 5; y >= 0; y--) format1 = readBit(8, y, format1);\n let format2 = 0;\n for (let y = size - 1; y >= size - 7; y--) format2 = readBit(8, y, format2);\n for (let x = size - 8; x < size; x++) format2 = readBit(x, 8, format2);\n return { version1, version2, format1, format2 };\n}", "docstring": "// Same as in drawTemplate, but reading", "url": "https://github.com/paulmillr/qr/blob/0681dd8b2934e1c13d957a5a8b61321f64f37661/src/decode.ts#L664-L685", "sha": "0681dd8b2934e1c13d957a5a8b61321f64f37661"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Bitmap.scale", "code": "scale(factor: number): Bitmap {\n if (!Number.isSafeInteger(factor) || factor > 1024)\n throw new Error(`invalid scale factor: ${factor}`);\n const { height, width } = this;\n const res = new Bitmap({ height: factor * height, width: factor * width });\n return res.rect(\n { x: 0, y: 0 },\n Infinity,\n ({ x, y }) => this.data[Math.floor(y / factor)][Math.floor(x / factor)]\n );\n }", "docstring": "// Each pixel size is multiplied by factor", "url": "https://github.com/paulmillr/qr/blob/0681dd8b2934e1c13d957a5a8b61321f64f37661/src/index.ts#L284-L294", "sha": "0681dd8b2934e1c13d957a5a8b61321f64f37661"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "openNewCamera", "code": "const openNewCamera = () => {\n window.api.openNewCamera()\n }", "docstring": "//打开新摄像头", "url": "https://github.com/houdunwang/camera/blob/543ca73de6ef6bce6df3409b148be3e8b8247340/src/renderer/src/composables/useCamera.ts#L16-L18", "sha": "543ca73de6ef6bce6df3409b148be3e8b8247340"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "deleteSessionData", "code": "const deleteSessionData = async (payload: SessionData) => {\n if (sessionDataList.value.length === 1) {\n await deleteSession()\n } else {\n const { id, session_id } = payload\n const sql = `DELETE FROM session_data WHERE id = '${id}' AND session_id = '${session_id}';`\n await executeSQL(sql)\n }\n\n Message.success(t('message.deleteSuccess'))\n\n getSessionData()\n }", "docstring": "// 删除一条对话数据", "url": "https://github.com/Synaptrix/ChatGPT-Desktop/blob/fea046838a6a0b3655f9d4ed6e15f9f8fa15ffd2/src/stores/session.ts#L129-L141", "sha": "fea046838a6a0b3655f9d4ed6e15f9f8fa15ffd2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Commenter.addReviewedCommitId", "code": "addReviewedCommitId(commentBody: string, commitId: string): string {\n const start = commentBody.indexOf(COMMIT_ID_START_TAG)\n const end = commentBody.indexOf(COMMIT_ID_END_TAG)\n if (start === -1 || end === -1) {\n return `${commentBody}\\n${COMMIT_ID_START_TAG}\\n\\n${COMMIT_ID_END_TAG}`\n }\n const ids = commentBody.substring(start + COMMIT_ID_START_TAG.length, end)\n return `${commentBody.substring(\n 0,\n start + COMMIT_ID_START_TAG.length\n )}${ids}\\n${commentBody.substring(end)}`\n }", "docstring": "// if the marker doesn't exist, add it", "url": 
"https://github.com/coderabbitai/ai-pr-reviewer/blob/d5ec3970b3acc4b9d673e6cd601bf4d3cf043b55/src/commenter.ts#L706-L717", "sha": "d5ec3970b3acc4b9d673e6cd601bf4d3cf043b55"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Group.members", "code": "async members(): Promise {\n return [new User()];\n }", "docstring": "/** @gqlField */", "url": "https://github.com/captbaritone/grats/blob/7fe027351ba554ddb3ccd13919fd58c7c6bef67f/examples/yoga/models/Group.ts#L17-L19", "sha": "7fe027351ba554ddb3ccd13919fd58c7c6bef67f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MyFunc", "code": "function MyFunc() {}", "docstring": "/** @gqlType */", "url": "https://github.com/captbaritone/grats/blob/7fe027351ba554ddb3ccd13919fd58c7c6bef67f/src/tests/fixtures/type_definitions/TagAttachedToWrongNode.ts#L2-L2", "sha": "7fe027351ba554ddb3ccd13919fd58c7c6bef67f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "User.myField", "code": "myField(): string {\n return \"Hello World\";\n }", "docstring": "/** @gqlField */", "url": "https://github.com/captbaritone/grats/blob/7fe027351ba554ddb3ccd13919fd58c7c6bef67f/website/docs/04-docblock-tags/snippets/02-property-and-method.grats.ts#L10-L12", "sha": "7fe027351ba554ddb3ccd13919fd58c7c6bef67f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "debounced", "code": "const debounced = (): void => {\n // Base64 decode the data\n const data: DataStore = store.data || {};\n\n // Loop over each stored capability\n for (const name of Object.keys(this.#stores)) {\n // Get the prefix offset for the keys\n const offset = `${name}-`.length;\n\n // Get any keys that match the capability name prefix\n const filtered: DataStore = {};\n\n // Loop over each key in the secret\n for (const key of Object.keys(data)) {\n // Match on the capability name as a prefix\n if (startsWith(name, key)) {\n // Strip the prefix and store the value\n filtered[key.slice(offset)] = data[key];\n }\n }\n\n // Send the data to the receiver callback\n this.#stores[name].receive(filtered);\n }\n\n // Call the onReady callback if this is the first time the secret has been read\n if (this.#onReady) {\n this.#onReady();\n this.#onReady = undefined;\n }\n };", "docstring": "// Wrap the update in a debounced function", "url": "https://github.com/defenseunicorns/pepr/blob/939e93673dde79ba6ba70c8961c6e1fcc1a70dbd/src/lib/controller/store.ts#L113-L143", "sha": "939e93673dde79ba6ba70c8961c6e1fcc1a70dbd"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Positions.confirmHeight", "code": "confirmHeight(_i: number, heightGetter: THeightGetter) {\n let i = _i;\n if (i > this.lastI) {\n this.calcHeights(i, heightGetter);\n return;\n }\n const h = heightGetter(i);\n if (h === this.heights[i]) {\n return;\n }\n const chg = h - this.heights[i];\n this.heights[i] = h;\n // shift the y positions by `chg` for all known y positions\n while (++i <= this.lastI) {\n this.ys[i] += chg;\n }\n if (this.ys[this.lastI + 1] != null) {\n this.ys[this.lastI + 1] += chg;\n }\n }", "docstring": "/**\n * Get the latest height for index `_i`. If it's in new terretory\n * (_i > lastI), find the heights (and y-values) leading up to it. 
If it's in\n * known territory (_i <= lastI) and the height is different than what is\n * known, recalculate subsequent y values, but don't confirm the heights of\n * those items, just update based on the difference.\n */", "url": "https://github.com/n9e/fe/blob/a99fb8353f00bdf6e79faabb1950e3b1f94fa700/src/pages/traceCpt/Detail/Timeline/ListView/Positions.tsx#L112-L131", "sha": "a99fb8353f00bdf6e79faabb1950e3b1f94fa700"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TransformedTargetRegressor.transformer_", "code": "get transformer_(): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This TransformedTargetRegressor instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error(\n 'TransformedTargetRegressor must call init() before accessing transformer_'\n )\n }\n\n return (async () => {\n // invoke accessor\n await this._py\n .ex`attr_TransformedTargetRegressor_transformer_ = bridgeTransformedTargetRegressor[${this.id}].transformer_`\n\n // convert the result from python to node.js\n return this\n ._py`attr_TransformedTargetRegressor_transformer_.tolist() if hasattr(attr_TransformedTargetRegressor_transformer_, 'tolist') else attr_TransformedTargetRegressor_transformer_`\n })()\n }", "docstring": "/**\n Transformer used in [`fit`](https://scikit-learn.org/stable/modules/generated/#sklearn.compose.TransformedTargetRegressor.fit \"sklearn.compose.TransformedTargetRegressor.fit\") and [`predict`](https://scikit-learn.org/stable/modules/generated/#sklearn.compose.TransformedTargetRegressor.predict \"sklearn.compose.TransformedTargetRegressor.predict\").\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/compose/TransformedTargetRegressor.ts#L366-L388", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EllipticEnvelope.mahalanobis", "code": "async mahalanobis(opts: {\n /**\n The observations, the Mahalanobis distances of the which we compute. Observations are assumed to be drawn from the same distribution than the data used in fit.\n */\n X?: ArrayLike[]\n }): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This EllipticEnvelope instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error('EllipticEnvelope must call init() before mahalanobis()')\n }\n\n // set up method params\n await this._py\n .ex`pms_EllipticEnvelope_mahalanobis = {'X': np.array(${opts['X'] ?? 
undefined}) if ${opts['X'] !== undefined} else None}\n\npms_EllipticEnvelope_mahalanobis = {k: v for k, v in pms_EllipticEnvelope_mahalanobis.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_EllipticEnvelope_mahalanobis = bridgeEllipticEnvelope[${this.id}].mahalanobis(**pms_EllipticEnvelope_mahalanobis)`\n\n // convert the result from python to node.js\n return this\n ._py`res_EllipticEnvelope_mahalanobis.tolist() if hasattr(res_EllipticEnvelope_mahalanobis, 'tolist') else res_EllipticEnvelope_mahalanobis`\n }", "docstring": "/**\n Compute the squared Mahalanobis distances of given observations.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/covariance/EllipticEnvelope.ts#L418-L447", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GraphicalLassoCV.score", "code": "async score(opts: {\n /**\n Test data of which we compute the likelihood, where `n_samples` is the number of samples and `n_features` is the number of features. `X_test` is assumed to be drawn from the same distribution than the data used in fit (including centering).\n */\n X_test?: ArrayLike[]\n\n /**\n Not used, present for API consistency by convention.\n */\n y?: any\n }): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This GraphicalLassoCV instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error('GraphicalLassoCV must call init() before score()')\n }\n\n // set up method params\n await this._py\n .ex`pms_GraphicalLassoCV_score = {'X_test': np.array(${opts['X_test'] ?? undefined}) if ${opts['X_test'] !== undefined} else None, 'y': ${opts['y'] ?? undefined}}\n\npms_GraphicalLassoCV_score = {k: v for k, v in pms_GraphicalLassoCV_score.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_GraphicalLassoCV_score = bridgeGraphicalLassoCV[${this.id}].score(**pms_GraphicalLassoCV_score)`\n\n // convert the result from python to node.js\n return this\n ._py`res_GraphicalLassoCV_score.tolist() if hasattr(res_GraphicalLassoCV_score, 'tolist') else res_GraphicalLassoCV_score`\n }", "docstring": "/**\n Compute the log-likelihood of `X_test` under the estimated Gaussian model.\n\n The Gaussian model is defined by its mean and covariance matrix which are represented respectively by `self.location_` and `self.covariance_`.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/covariance/GraphicalLassoCV.ts#L381-L415", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MinCovDet.get_precision", "code": "async get_precision(opts: {\n /**\n The precision matrix associated to the current covariance object.\n */\n precision_?: ArrayLike[]\n }): Promise {\n if (this._isDisposed) {\n throw new Error('This MinCovDet instance has already been disposed')\n }\n\n if (!this._isInitialized) {\n throw new Error('MinCovDet must call init() before get_precision()')\n }\n\n // set up method params\n await this._py\n .ex`pms_MinCovDet_get_precision = {'precision_': np.array(${opts['precision_'] ?? 
undefined}) if ${opts['precision_'] !== undefined} else None}\n\npms_MinCovDet_get_precision = {k: v for k, v in pms_MinCovDet_get_precision.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_MinCovDet_get_precision = bridgeMinCovDet[${this.id}].get_precision(**pms_MinCovDet_get_precision)`\n\n // convert the result from python to node.js\n return this\n ._py`res_MinCovDet_get_precision.tolist() if hasattr(res_MinCovDet_get_precision, 'tolist') else res_MinCovDet_get_precision`\n }", "docstring": "/**\n Getter for the precision matrix.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/covariance/MinCovDet.ts#L282-L309", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ShrunkCovariance.location_", "code": "get location_(): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This ShrunkCovariance instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error(\n 'ShrunkCovariance must call init() before accessing location_'\n )\n }\n\n return (async () => {\n // invoke accessor\n await this._py\n .ex`attr_ShrunkCovariance_location_ = bridgeShrunkCovariance[${this.id}].location_`\n\n // convert the result from python to node.js\n return this\n ._py`attr_ShrunkCovariance_location_.tolist() if hasattr(attr_ShrunkCovariance_location_, 'tolist') else attr_ShrunkCovariance_location_`\n })()\n }", "docstring": "/**\n Estimated location, i.e. the estimated mean.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/covariance/ShrunkCovariance.ts#L430-L452", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IncrementalPCA.n_components_", "code": "get n_components_(): Promise {\n if (this._isDisposed) {\n throw new Error('This IncrementalPCA instance has already been disposed')\n }\n\n if (!this._isInitialized) {\n throw new Error(\n 'IncrementalPCA must call init() before accessing n_components_'\n )\n }\n\n return (async () => {\n // invoke accessor\n await this._py\n .ex`attr_IncrementalPCA_n_components_ = bridgeIncrementalPCA[${this.id}].n_components_`\n\n // convert the result from python to node.js\n return this\n ._py`attr_IncrementalPCA_n_components_.tolist() if hasattr(attr_IncrementalPCA_n_components_, 'tolist') else attr_IncrementalPCA_n_components_`\n })()\n }", "docstring": "/**\n The estimated number of components. 
Relevant when `n_components=None`.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/decomposition/IncrementalPCA.ts#L711-L731", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DummyClassifier.classes_", "code": "get classes_(): Promise {\n if (this._isDisposed) {\n throw new Error('This DummyClassifier instance has already been disposed')\n }\n\n if (!this._isInitialized) {\n throw new Error(\n 'DummyClassifier must call init() before accessing classes_'\n )\n }\n\n return (async () => {\n // invoke accessor\n await this._py\n .ex`attr_DummyClassifier_classes_ = bridgeDummyClassifier[${this.id}].classes_`\n\n // convert the result from python to node.js\n return this\n ._py`attr_DummyClassifier_classes_.tolist() if hasattr(attr_DummyClassifier_classes_, 'tolist') else attr_DummyClassifier_classes_`\n })()\n }", "docstring": "/**\n Unique class labels observed in `y`. For multi-output classification problems, this attribute is a list of arrays as each output has an independent set of possible classes.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/dummy/DummyClassifier.ts#L418-L438", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SequentialFeatureSelector.transform", "code": "async transform(opts: {\n /**\n The input samples.\n */\n X?: any\n }): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This SequentialFeatureSelector instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error(\n 'SequentialFeatureSelector must call init() before transform()'\n )\n }\n\n // set up method params\n await this._py\n .ex`pms_SequentialFeatureSelector_transform = {'X': np.array(${opts['X'] ?? 
undefined}) if ${opts['X'] !== undefined} else None}\n\npms_SequentialFeatureSelector_transform = {k: v for k, v in pms_SequentialFeatureSelector_transform.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_SequentialFeatureSelector_transform = bridgeSequentialFeatureSelector[${this.id}].transform(**pms_SequentialFeatureSelector_transform)`\n\n // convert the result from python to node.js\n return this\n ._py`res_SequentialFeatureSelector_transform.tolist() if hasattr(res_SequentialFeatureSelector_transform, 'tolist') else res_SequentialFeatureSelector_transform`\n }", "docstring": "/**\n Reduce X to the selected features.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/feature_selection/SequentialFeatureSelector.ts#L417-L448", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PartialDependenceDisplay.bars_", "code": "get bars_(): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This PartialDependenceDisplay instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error(\n 'PartialDependenceDisplay must call init() before accessing bars_'\n )\n }\n\n return (async () => {\n // invoke accessor\n await this._py\n .ex`attr_PartialDependenceDisplay_bars_ = bridgePartialDependenceDisplay[${this.id}].bars_`\n\n // convert the result from python to node.js\n return this\n ._py`attr_PartialDependenceDisplay_bars_.tolist() if hasattr(attr_PartialDependenceDisplay_bars_, 'tolist') else attr_PartialDependenceDisplay_bars_`\n })()\n }", "docstring": "/**\n If `ax` is an axes or `undefined`, `bars_\\[i, j\\]` is the partial dependence bar plot on the i-th row and j-th column (for a categorical feature). If `ax` is a list of axes, `bars_\\[i\\]` is the partial dependence bar plot corresponding to the i-th item in `ax`. Elements that are `undefined` correspond to a nonexisting axes or an axes that does not include a bar plot.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/inspection/PartialDependenceDisplay.ts#L571-L593", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Nystroem.fit_transform", "code": "async fit_transform(opts: {\n /**\n Input samples.\n */\n X?: ArrayLike[]\n\n /**\n Target values (`undefined` for unsupervised transformations).\n */\n y?: ArrayLike\n\n /**\n Additional fit parameters.\n */\n fit_params?: any\n }): Promise {\n if (this._isDisposed) {\n throw new Error('This Nystroem instance has already been disposed')\n }\n\n if (!this._isInitialized) {\n throw new Error('Nystroem must call init() before fit_transform()')\n }\n\n // set up method params\n await this._py\n .ex`pms_Nystroem_fit_transform = {'X': np.array(${opts['X'] ?? undefined}) if ${opts['X'] !== undefined} else None, 'y': np.array(${opts['y'] ?? undefined}) if ${opts['y'] !== undefined} else None, 'fit_params': ${opts['fit_params'] ?? 
undefined}}\n\npms_Nystroem_fit_transform = {k: v for k, v in pms_Nystroem_fit_transform.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_Nystroem_fit_transform = bridgeNystroem[${this.id}].fit_transform(**pms_Nystroem_fit_transform)`\n\n // convert the result from python to node.js\n return this\n ._py`res_Nystroem_fit_transform.tolist() if hasattr(res_Nystroem_fit_transform, 'tolist') else res_Nystroem_fit_transform`\n }", "docstring": "/**\n Fit to data, then transform it.\n\n Fits transformer to `X` and `y` with optional parameters `fit_params` and returns a transformed version of `X`.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/kernel_approximation/Nystroem.ts#L185-L222", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ElasticNet.set_score_request", "code": "async set_score_request(opts: {\n /**\n Metadata routing for `sample_weight` parameter in `score`.\n */\n sample_weight?: string | boolean\n }): Promise {\n if (this._isDisposed) {\n throw new Error('This ElasticNet instance has already been disposed')\n }\n\n if (!this._isInitialized) {\n throw new Error('ElasticNet must call init() before set_score_request()')\n }\n\n // set up method params\n await this._py\n .ex`pms_ElasticNet_set_score_request = {'sample_weight': ${opts['sample_weight'] ?? undefined}}\n\npms_ElasticNet_set_score_request = {k: v for k, v in pms_ElasticNet_set_score_request.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_ElasticNet_set_score_request = bridgeElasticNet[${this.id}].set_score_request(**pms_ElasticNet_set_score_request)`\n\n // convert the result from python to node.js\n return this\n ._py`res_ElasticNet_set_score_request.tolist() if hasattr(res_ElasticNet_set_score_request, 'tolist') else res_ElasticNet_set_score_request`\n }", "docstring": "/**\n Request metadata passed to the `score` method.\n\n Note that this method is only relevant if `enable_metadata_routing=True` (see [`sklearn.set_config`](https://scikit-learn.org/stable/modules/generated/sklearn.set_config.html#sklearn.set_config \"sklearn.set_config\")). 
Please see [User Guide](https://scikit-learn.org/stable/modules/generated/../../metadata_routing.html#metadata-routing) on how the routing mechanism works.\n\n The options for each parameter are:\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/linear_model/ElasticNet.ts#L503-L530", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LarsCV.alpha_", "code": "get alpha_(): Promise {\n if (this._isDisposed) {\n throw new Error('This LarsCV instance has already been disposed')\n }\n\n if (!this._isInitialized) {\n throw new Error('LarsCV must call init() before accessing alpha_')\n }\n\n return (async () => {\n // invoke accessor\n await this._py.ex`attr_LarsCV_alpha_ = bridgeLarsCV[${this.id}].alpha_`\n\n // convert the result from python to node.js\n return this\n ._py`attr_LarsCV_alpha_.tolist() if hasattr(attr_LarsCV_alpha_, 'tolist') else attr_LarsCV_alpha_`\n })()\n }", "docstring": "/**\n the estimated regularization parameter alpha\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/linear_model/LarsCV.ts#L468-L485", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Lasso.fit", "code": "async fit(opts: {\n /**\n Data.\n\n Note that large sparse matrices and arrays requiring `int64` indices are not accepted.\n */\n X?: SparseMatrix\n\n /**\n Target. Will be cast to X’s dtype if necessary.\n */\n y?: NDArray\n\n /**\n Sample weights. Internally, the `sample_weight` vector will be rescaled to sum to `n_samples`.\n */\n sample_weight?: number | ArrayLike\n\n /**\n Allow to bypass several input checking. Don’t use this parameter unless you know what you do.\n\n @defaultValue `true`\n */\n check_input?: boolean\n }): Promise {\n if (this._isDisposed) {\n throw new Error('This Lasso instance has already been disposed')\n }\n\n if (!this._isInitialized) {\n throw new Error('Lasso must call init() before fit()')\n }\n\n // set up method params\n await this._py\n .ex`pms_Lasso_fit = {'X': ${opts['X'] ?? undefined}, 'y': np.array(${opts['y'] ?? undefined}) if ${opts['y'] !== undefined} else None, 'sample_weight': np.array(${opts['sample_weight'] ?? undefined}) if ${opts['sample_weight'] !== undefined} else None, 'check_input': ${opts['check_input'] ?? 
undefined}}\n\npms_Lasso_fit = {k: v for k, v in pms_Lasso_fit.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_Lasso_fit = bridgeLasso[${this.id}].fit(**pms_Lasso_fit)`\n\n // convert the result from python to node.js\n return this\n ._py`res_Lasso_fit.tolist() if hasattr(res_Lasso_fit, 'tolist') else res_Lasso_fit`\n }", "docstring": "/**\n Fit model with coordinate descent.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/linear_model/Lasso.ts#L166-L212", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LinearRegression.get_metadata_routing", "code": "async get_metadata_routing(opts: {\n /**\n A [`MetadataRequest`](https://scikit-learn.org/stable/modules/generated/sklearn.utils.metadata_routing.MetadataRequest.html#sklearn.utils.metadata_routing.MetadataRequest \"sklearn.utils.metadata_routing.MetadataRequest\") encapsulating routing information.\n */\n routing?: any\n }): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This LinearRegression instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error(\n 'LinearRegression must call init() before get_metadata_routing()'\n )\n }\n\n // set up method params\n await this._py\n .ex`pms_LinearRegression_get_metadata_routing = {'routing': ${opts['routing'] ?? undefined}}\n\npms_LinearRegression_get_metadata_routing = {k: v for k, v in pms_LinearRegression_get_metadata_routing.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_LinearRegression_get_metadata_routing = bridgeLinearRegression[${this.id}].get_metadata_routing(**pms_LinearRegression_get_metadata_routing)`\n\n // convert the result from python to node.js\n return this\n ._py`res_LinearRegression_get_metadata_routing.tolist() if hasattr(res_LinearRegression_get_metadata_routing, 'tolist') else res_LinearRegression_get_metadata_routing`\n }", "docstring": "/**\n Get metadata routing of this object.\n\n Please check [User Guide](https://scikit-learn.org/stable/modules/generated/../../metadata_routing.html#metadata-routing) on how the routing mechanism works.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/linear_model/LinearRegression.ts#L171-L202", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LogisticRegression.predict", "code": "async predict(opts: {\n /**\n The data matrix for which we want to get the predictions.\n */\n X?: ArrayLike | SparseMatrix[]\n }): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This LogisticRegression instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error('LogisticRegression must call init() before predict()')\n }\n\n // set up method params\n await this._py\n .ex`pms_LogisticRegression_predict = {'X': np.array(${opts['X'] ?? 
undefined}) if ${opts['X'] !== undefined} else None}\n\npms_LogisticRegression_predict = {k: v for k, v in pms_LogisticRegression_predict.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_LogisticRegression_predict = bridgeLogisticRegression[${this.id}].predict(**pms_LogisticRegression_predict)`\n\n // convert the result from python to node.js\n return this\n ._py`res_LogisticRegression_predict.tolist() if hasattr(res_LogisticRegression_predict, 'tolist') else res_LogisticRegression_predict`\n }", "docstring": "/**\n Predict class labels for samples in X.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/linear_model/LogisticRegression.ts#L366-L395", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LocallyLinearEmbedding.set_output", "code": "async set_output(opts: {\n /**\n Configure output of `transform` and `fit_transform`.\n */\n transform?: 'default' | 'pandas' | 'polars'\n }): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This LocallyLinearEmbedding instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error(\n 'LocallyLinearEmbedding must call init() before set_output()'\n )\n }\n\n // set up method params\n await this._py\n .ex`pms_LocallyLinearEmbedding_set_output = {'transform': ${opts['transform'] ?? undefined}}\n\npms_LocallyLinearEmbedding_set_output = {k: v for k, v in pms_LocallyLinearEmbedding_set_output.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_LocallyLinearEmbedding_set_output = bridgeLocallyLinearEmbedding[${this.id}].set_output(**pms_LocallyLinearEmbedding_set_output)`\n\n // convert the result from python to node.js\n return this\n ._py`res_LocallyLinearEmbedding_set_output.tolist() if hasattr(res_LocallyLinearEmbedding_set_output, 'tolist') else res_LocallyLinearEmbedding_set_output`\n }", "docstring": "/**\n Set output container.\n\n See [Introducing the set_output API](https://scikit-learn.org/stable/modules/generated/../../auto_examples/miscellaneous/plot_set_output.html#sphx-glr-auto-examples-miscellaneous-plot-set-output-py) for an example on how to use the API.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/manifold/LocallyLinearEmbedding.ts#L339-L370", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TSNE.learning_rate_", "code": "get learning_rate_(): Promise {\n if (this._isDisposed) {\n throw new Error('This TSNE instance has already been disposed')\n }\n\n if (!this._isInitialized) {\n throw new Error('TSNE must call init() before accessing learning_rate_')\n }\n\n return (async () => {\n // invoke accessor\n await this._py\n .ex`attr_TSNE_learning_rate_ = bridgeTSNE[${this.id}].learning_rate_`\n\n // convert the result from python to node.js\n return this\n ._py`attr_TSNE_learning_rate_.tolist() if hasattr(attr_TSNE_learning_rate_, 'tolist') else attr_TSNE_learning_rate_`\n })()\n }", "docstring": "/**\n Effective learning rate.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/manifold/TSNE.ts#L473-L491", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": 
"github_2023", "owner": "", "lang": "", "func_name": "ConfusionMatrixDisplay.dispose", "code": "async dispose() {\n if (this._isDisposed) {\n return\n }\n\n if (!this._isInitialized) {\n return\n }\n\n await this._py.ex`del bridgeConfusionMatrixDisplay[${this.id}]`\n\n this._isDisposed = true\n }", "docstring": "/**\n Disposes of the underlying Python resources.\n\n Once `dispose()` is called, the instance is no longer usable.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/metrics/ConfusionMatrixDisplay.ts#L96-L108", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LearningCurveDisplay.ax_", "code": "get ax_(): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This LearningCurveDisplay instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error(\n 'LearningCurveDisplay must call init() before accessing ax_'\n )\n }\n\n return (async () => {\n // invoke accessor\n await this._py\n .ex`attr_LearningCurveDisplay_ax_ = bridgeLearningCurveDisplay[${this.id}].ax_`\n\n // convert the result from python to node.js\n return this\n ._py`attr_LearningCurveDisplay_ax_.tolist() if hasattr(attr_LearningCurveDisplay_ax_, 'tolist') else attr_LearningCurveDisplay_ax_`\n })()\n }", "docstring": "/**\n Axes with the learning curve.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/model_selection/LearningCurveDisplay.ts#L360-L382", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RobustScaler.fit", "code": "async fit(opts: {\n /**\n The data used to compute the median and quantiles used for later scaling along the features axis.\n */\n X?: ArrayLike | SparseMatrix[]\n\n /**\n Not used, present here for API consistency by convention.\n */\n y?: any\n }): Promise {\n if (this._isDisposed) {\n throw new Error('This RobustScaler instance has already been disposed')\n }\n\n if (!this._isInitialized) {\n throw new Error('RobustScaler must call init() before fit()')\n }\n\n // set up method params\n await this._py\n .ex`pms_RobustScaler_fit = {'X': np.array(${opts['X'] ?? undefined}) if ${opts['X'] !== undefined} else None, 'y': ${opts['y'] ?? 
undefined}}\n\npms_RobustScaler_fit = {k: v for k, v in pms_RobustScaler_fit.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_RobustScaler_fit = bridgeRobustScaler[${this.id}].fit(**pms_RobustScaler_fit)`\n\n // convert the result from python to node.js\n return this\n ._py`res_RobustScaler_fit.tolist() if hasattr(res_RobustScaler_fit, 'tolist') else res_RobustScaler_fit`\n }", "docstring": "/**\n Compute the median and quantiles to be used for scaling.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/preprocessing/RobustScaler.ts#L134-L166", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SelfTrainingClassifier.decision_function", "code": "async decision_function(opts: {\n /**\n Array representing the data.\n */\n X?: ArrayLike | SparseMatrix[]\n }): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This SelfTrainingClassifier instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error(\n 'SelfTrainingClassifier must call init() before decision_function()'\n )\n }\n\n // set up method params\n await this._py\n .ex`pms_SelfTrainingClassifier_decision_function = {'X': np.array(${opts['X'] ?? undefined}) if ${opts['X'] !== undefined} else None}\n\npms_SelfTrainingClassifier_decision_function = {k: v for k, v in pms_SelfTrainingClassifier_decision_function.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_SelfTrainingClassifier_decision_function = bridgeSelfTrainingClassifier[${this.id}].decision_function(**pms_SelfTrainingClassifier_decision_function)`\n\n // convert the result from python to node.js\n return this\n ._py`res_SelfTrainingClassifier_decision_function.tolist() if hasattr(res_SelfTrainingClassifier_decision_function, 'tolist') else res_SelfTrainingClassifier_decision_function`\n }", "docstring": "/**\n Call decision function of the `base_estimator`.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/semi_supervised/SelfTrainingClassifier.ts#L145-L176", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LinearSVC.sparsify", "code": "async sparsify(opts: {}): Promise {\n if (this._isDisposed) {\n throw new Error('This LinearSVC instance has already been disposed')\n }\n\n if (!this._isInitialized) {\n throw new Error('LinearSVC must call init() before sparsify()')\n }\n\n // set up method params\n await this._py.ex`pms_LinearSVC_sparsify = {}\n\npms_LinearSVC_sparsify = {k: v for k, v in pms_LinearSVC_sparsify.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_LinearSVC_sparsify = bridgeLinearSVC[${this.id}].sparsify(**pms_LinearSVC_sparsify)`\n\n // convert the result from python to node.js\n return this\n ._py`res_LinearSVC_sparsify.tolist() if hasattr(res_LinearSVC_sparsify, 'tolist') else res_LinearSVC_sparsify`\n }", "docstring": "/**\n Convert coefficient matrix to sparse format.\n\n Converts the `coef_` member to a scipy.sparse matrix, which for L1-regularized models can be much more memory- and storage-efficient than the usual numpy.ndarray representation.\n\n The `intercept_` member is not converted.\n */", "url": 
"https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/svm/LinearSVC.ts#L474-L495", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExtraTreeClassifier.decision_path", "code": "async decision_path(opts: {\n /**\n The input samples. Internally, it will be converted to `dtype=np.float32` and if a sparse matrix is provided to a sparse `csr_matrix`.\n */\n X?: ArrayLike | SparseMatrix[]\n\n /**\n Allow to bypass several input checking. Don’t use this parameter unless you know what you’re doing.\n\n @defaultValue `true`\n */\n check_input?: boolean\n }): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This ExtraTreeClassifier instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error(\n 'ExtraTreeClassifier must call init() before decision_path()'\n )\n }\n\n // set up method params\n await this._py\n .ex`pms_ExtraTreeClassifier_decision_path = {'X': np.array(${opts['X'] ?? undefined}) if ${opts['X'] !== undefined} else None, 'check_input': ${opts['check_input'] ?? undefined}}\n\npms_ExtraTreeClassifier_decision_path = {k: v for k, v in pms_ExtraTreeClassifier_decision_path.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_ExtraTreeClassifier_decision_path = bridgeExtraTreeClassifier[${this.id}].decision_path(**pms_ExtraTreeClassifier_decision_path)`\n\n // convert the result from python to node.js\n return this\n ._py`res_ExtraTreeClassifier_decision_path.tolist() if hasattr(res_ExtraTreeClassifier_decision_path, 'tolist') else res_ExtraTreeClassifier_decision_path`\n }", "docstring": "/**\n Return the decision path in the tree.\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/tree/ExtraTreeClassifier.ts#L285-L323", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExtraTreeRegressor.fit", "code": "async fit(opts: {\n /**\n The training input samples. Internally, it will be converted to `dtype=np.float32` and if a sparse matrix is provided to a sparse `csc_matrix`.\n */\n X?: ArrayLike | SparseMatrix[]\n\n /**\n The target values (real numbers). Use `dtype=np.float64` and `order='C'` for maximum efficiency.\n */\n y?: ArrayLike\n\n /**\n Sample weights. If `undefined`, then samples are equally weighted. Splits that would create child nodes with net zero or negative weight are ignored while searching for a split in each node.\n */\n sample_weight?: ArrayLike\n\n /**\n Allow to bypass several input checking. Don’t use this parameter unless you know what you’re doing.\n\n @defaultValue `true`\n */\n check_input?: boolean\n }): Promise {\n if (this._isDisposed) {\n throw new Error(\n 'This ExtraTreeRegressor instance has already been disposed'\n )\n }\n\n if (!this._isInitialized) {\n throw new Error('ExtraTreeRegressor must call init() before fit()')\n }\n\n // set up method params\n await this._py\n .ex`pms_ExtraTreeRegressor_fit = {'X': np.array(${opts['X'] ?? undefined}) if ${opts['X'] !== undefined} else None, 'y': np.array(${opts['y'] ?? undefined}) if ${opts['y'] !== undefined} else None, 'sample_weight': np.array(${opts['sample_weight'] ?? undefined}) if ${opts['sample_weight'] !== undefined} else None, 'check_input': ${opts['check_input'] ?? 
undefined}}\n\npms_ExtraTreeRegressor_fit = {k: v for k, v in pms_ExtraTreeRegressor_fit.items() if v is not None}`\n\n // invoke method\n await this._py\n .ex`res_ExtraTreeRegressor_fit = bridgeExtraTreeRegressor[${this.id}].fit(**pms_ExtraTreeRegressor_fit)`\n\n // convert the result from python to node.js\n return this\n ._py`res_ExtraTreeRegressor_fit.tolist() if hasattr(res_ExtraTreeRegressor_fit, 'tolist') else res_ExtraTreeRegressor_fit`\n }", "docstring": "/**\n Build a decision tree regressor from the training set (X, y).\n */", "url": "https://github.com/transitive-bullshit/scikit-learn-ts/blob/a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc/packages/sklearn/src/generated/tree/ExtraTreeRegressor.ts#L315-L361", "sha": "a7d01fc04d3f5fe124098d13dc6efc5f2e256bbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DidResolverCacheMemory.set", "code": "public async set(didUri: string, resolutionResult: DidResolutionResult): Promise {\n this.cache.set(didUri, resolutionResult);\n }", "docstring": "/**\n * Stores a DID resolution result in the cache with a TTL.\n *\n * @param didUri - The DID string used as the key for storing the result.\n * @param resolutionResult - The DID resolution result to be cached.\n * @returns A promise that resolves when the operation is complete.\n */", "url": "https://github.com/decentralized-identity/web5-js/blob/707bc224243aceeefd62f0e8592735315d580ec2/packages/agent/src/prototyping/dids/resolver-cache-memory.ts#L51-L53", "sha": "707bc224243aceeefd62f0e8592735315d580ec2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Record.contextId", "code": "get contextId() { return this.deleted ? this._initialWrite.contextId : this._contextId; }", "docstring": "/** Record's context ID. If the record is deleted, the context Id comes from the initial write */", "url": "https://github.com/decentralized-identity/web5-js/blob/707bc224243aceeefd62f0e8592735315d580ec2/packages/api/src/record.ts#L281-L281", "sha": "707bc224243aceeefd62f0e8592735315d580ec2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Stream.consumeToArrayBuffer", "code": "public static async consumeToArrayBuffer({ readableStream }: { readableStream: ReadableStream}): Promise {\n const iterableStream = Stream.asAsyncIterator(readableStream);\n const arrayBuffer = await Convert.asyncIterable(iterableStream).toArrayBufferAsync();\n\n return arrayBuffer;\n }", "docstring": "/**\n * Consumes a `ReadableStream` and returns its contents as an `ArrayBuffer`.\n *\n * This method reads all data from a `ReadableStream`, collects it, and converts it into an\n * `ArrayBuffer`.\n *\n * @example\n * ```ts\n * const readableStream = new ReadableStream({ ... 
});\n * const arrayBuffer = await Stream.consumeToArrayBuffer({ readableStream });\n * ```\n *\n * @param readableStream - The Web `ReadableStream` whose data will be consumed.\n * @returns A Promise that resolves to an `ArrayBuffer` containing all the data from the stream.\n */", "url": "https://github.com/decentralized-identity/web5-js/blob/707bc224243aceeefd62f0e8592735315d580ec2/packages/common/src/stream.ts#L55-L60", "sha": "707bc224243aceeefd62f0e8592735315d580ec2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Secp256k1.decompressPublicKey", "code": "public static async decompressPublicKey({ publicKeyBytes }: {\n publicKeyBytes: Uint8Array;\n }): Promise {\n // Decode Weierstrass points from the public key byte array.\n const point = secp256k1.ProjectivePoint.fromHex(publicKeyBytes);\n\n // Return the uncompressed form of the public key.\n return point.toRawBytes(false);\n }", "docstring": "/**\n * Converts a public key to its uncompressed form.\n *\n * @remarks\n * This method takes a compressed public key represented as a byte array and decompresses it.\n * Public key decompression involves reconstructing the y-coordinate from the x-coordinate,\n * resulting in the full public key. This method is used when the uncompressed key format is\n * required for certain cryptographic operations or interoperability.\n *\n * @example\n * ```ts\n * const compressedPublicKeyBytes = new Uint8Array([...]); // Replace with actual compressed public key bytes\n * const decompressedPublicKey = await Secp256k1.decompressPublicKey({\n * publicKeyBytes: compressedPublicKeyBytes\n * });\n * ```\n *\n * @param params - The parameters for the public key decompression.\n * @param params.publicKeyBytes - The public key as a Uint8Array.\n *\n * @returns A Promise that resolves to the uncompressed public key as a Uint8Array.\n */", "url": "https://github.com/decentralized-identity/web5-js/blob/707bc224243aceeefd62f0e8592735315d580ec2/packages/crypto/src/primitives/secp256k1.ts#L385-L393", "sha": "707bc224243aceeefd62f0e8592735315d580ec2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sha256.digest", "code": "public static async digest({ data }: {\n data: Uint8Array;\n }): Promise {\n const hash = sha256(data);\n\n return hash;\n }", "docstring": "/**\n * Generates a SHA-256 hash digest for the given data.\n *\n * @remarks\n * This method produces a hash digest using the SHA-256 algorithm. 
The resultant digest\n * is deterministic, meaning the same data will always produce the same hash, but\n * is computationally infeasible to regenerate the original data from the hash.\n *\n * @example\n * ```ts\n * const data = new Uint8Array([...]);\n * const hash = await Sha256.digest({ data });\n * ```\n *\n * @param params - The parameters for the hashing operation.\n * @param params.data - The data to hash, represented as a Uint8Array.\n *\n * @returns A Promise that resolves to the SHA-256 hash digest of the provided data as a Uint8Array.\n */", "url": "https://github.com/decentralized-identity/web5-js/blob/707bc224243aceeefd62f0e8592735315d580ec2/packages/crypto/src/primitives/sha256.ts#L40-L46", "sha": "707bc224243aceeefd62f0e8592735315d580ec2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DidWeb.resolve", "code": "public static async resolve(didUri: string, _options?: DidResolutionOptions): Promise {\n // Attempt to parse the DID URI.\n const parsedDid = Did.parse(didUri);\n\n // If parsing failed, the DID is invalid.\n if (!parsedDid) {\n return {\n ...EMPTY_DID_RESOLUTION_RESULT,\n didResolutionMetadata: { error: 'invalidDid' }\n };\n }\n\n // If the DID method is not \"web\", return an error.\n if (parsedDid.method !== DidWeb.methodName) {\n return {\n ...EMPTY_DID_RESOLUTION_RESULT,\n didResolutionMetadata: { error: 'methodNotSupported' }\n };\n }\n\n // Replace \":\" with \"/\" in the identifier and prepend \"https://\" to obtain the fully qualified\n // domain name and optional path.\n let baseUrl = `https://${parsedDid.id.replace(/:/g, '/')}`;\n\n // If the domain contains a percent encoded port value, decode the colon.\n baseUrl = decodeURIComponent(baseUrl);\n\n // Append the expected location of the DID document depending on whether a path was specified.\n const didDocumentUrl = parsedDid.id.includes(':') ?\n `${baseUrl}/did.json` :\n `${baseUrl}/.well-known/did.json`;\n\n try {\n // Perform an HTTP GET request to obtain the DID document.\n const response = await fetch(didDocumentUrl);\n\n // If the response status code is not 200, return an error.\n if (!response.ok) throw new Error('HTTP error status code returned');\n\n // Parse the DID document.\n const didDocument = await response.json() as DidDocument;\n\n return {\n ...EMPTY_DID_RESOLUTION_RESULT,\n didDocument,\n };\n\n } catch (error: any) {\n // If the DID document could not be retrieved, return an error.\n return {\n ...EMPTY_DID_RESOLUTION_RESULT,\n didResolutionMetadata: { error: 'notFound' }\n };\n }\n }", "docstring": "/**\n * Resolves a `did:web` identifier to a DID Document.\n *\n * @param didUri - The DID to be resolved.\n * @param _options - Optional parameters for resolving the DID. 
Unused by this DID method.\n * @returns A Promise resolving to a {@link DidResolutionResult} object representing the result of the resolution.\n */", "url": "https://github.com/decentralized-identity/web5-js/blob/707bc224243aceeefd62f0e8592735315d580ec2/packages/dids/src/methods/did-web.ts#L41-L95", "sha": "707bc224243aceeefd62f0e8592735315d580ec2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ViewApi.softDelete", "code": "softDelete = (\n query?: {\n /** @format int64 */\n id?: number\n },\n params: RequestParams = {}\n ) =>\n this.request({\n path: `/api/admin/view/soft-delete`,\n method: 'DELETE',\n query: query,\n secure: true,\n ...params,\n })", "docstring": "/**\n * No description\n *\n * @tags view\n * @name SoftDelete\n * @summary 删除\n * @request DELETE:/api/admin/view/soft-delete\n * @secure\n */", "url": "https://github.com/leooneone/aibpm.ui.plus/blob/f6d6d8c37e19c734cbf0c7c98d80c39f5a56900a/src/api/admin/View.ts#L158-L171", "sha": "f6d6d8c37e19c734cbf0c7c98d80c39f5a56900a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "svgFind", "code": "function svgFind(e: any) {\n const arr = []\n const dirents = readdirSync(e, { withFileTypes: true })\n for (const dirent of dirents) {\n if (dirent.isDirectory()) arr.push(...svgFind(e + dirent.name + '/'))\n else {\n const svg = readFileSync(e + dirent.name)\n .toString()\n .replace(clearReturn, '')\n .replace(svgTitle, ($1: any, $2: any) => {\n let width = 0,\n height = 0,\n content = $2.replace(clearHeightWidth, (s1: any, s2: any, s3: any) => {\n if (s2 === 'width') width = s3\n else if (s2 === 'height') height = s3\n return ''\n })\n if (!hasViewBox.test($2)) content += `viewBox=\"0 0 ${width} ${height}\"`\n return ``\n }).replace('', '')\n arr.push(svg)\n }\n }\n return arr\n}", "docstring": "// 查找svg文件", "url": "https://github.com/leooneone/aibpm.ui.plus/blob/f6d6d8c37e19c734cbf0c7c98d80c39f5a56900a/src/icons/index.ts#L10-L34", "sha": "f6d6d8c37e19c734cbf0c7c98d80c39f5a56900a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getTablesByDatabase", "code": "const getTablesByDatabase = async (params: any) => {\n const result = await apiRequest(\n 'get',\n 'catalog/get-tables-by-database',\n params\n );\n return result;\n};", "docstring": "// 获取catalog弹窗 Folders/Tables列表", "url": "https://github.com/awslabs/sensitive-data-protection-on-aws/blob/03c90b9e2d464d014fa2b3d3c955af136f0aa3fd/source/portal/src/apis/data-catalog/api.ts#L4-L11", "sha": "03c90b9e2d464d014fa2b3d3c955af136f0aa3fd"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getPostCollect", "code": "async function getPostCollect(\n postId: string,\n): Promise {\n const db = await TGSqlite.getDB();\n const sql = \"SELECT * FROM UFMap WHERE postId = ?\";\n const res: TGApp.Sqlite.UserCollection.UFMap[] = await db.select(sql, [postId]);\n if (res.length > 0) return res;\n const unclassifiedSql = \"SELECT * FROM UFPost WHERE id = ?\";\n const unclassifiedRes: TGApp.Sqlite.UserCollection.UFPost[] = await db.select(unclassifiedSql, [\n postId,\n ]);\n return unclassifiedRes.length > 0;\n}", "docstring": "/**\n * @description 获取单个帖子的收藏信息\n * @since Beta v0.4.5\n * @param {string} postId 文章 id\n * @return {Promise} 返回收藏信息\n */", "url": "https://github.com/BTMuli/TeyvatGuide/blob/25f95d9f90b37f8b7f1d77ae7494c740b1e820b7/src/plugins/Sqlite/modules/userCollect.ts#L15-L27", "sha": "25f95d9f90b37f8b7f1d77ae7494c740b1e820b7"} +{"repo_name": "", 
"dataset": "github_2023", "owner": "", "lang": "", "func_name": "animateResponseText", "code": "function animateResponseText() {\n if (finished || controller.signal.aborted) {\n responseText += remainText;\n console.log(\"[Response Animation] finished\");\n if (responseText?.length === 0) {\n options.onError?.(new Error(\"empty response from server\"));\n }\n return;\n }\n\n if (remainText.length > 0) {\n const fetchCount = Math.max(1, Math.round(remainText.length / 60));\n const fetchText = remainText.slice(0, fetchCount);\n responseText += fetchText;\n remainText = remainText.slice(fetchCount);\n options.onUpdate?.(responseText, fetchText);\n }\n\n requestAnimationFrame(animateResponseText);\n }", "docstring": "// animate response to make it looks smooth", "url": "https://github.com/ChatGPTNextWeb/NextChat/blob/a029b4330b89f8f2d1258e46fa68ba87c998a745/app/client/platforms/alibaba.ts#L149-L168", "sha": "a029b4330b89f8f2d1258e46fa68ba87c998a745"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "updateMcpConfig", "code": "async function updateMcpConfig(config: McpConfigData): Promise {\n try {\n // 确保目录存在\n await fs.mkdir(path.dirname(CONFIG_PATH), { recursive: true });\n await fs.writeFile(CONFIG_PATH, JSON.stringify(config, null, 2));\n } catch (error) {\n throw error;\n }\n}", "docstring": "// 更新 MCP 配置文件", "url": "https://github.com/ChatGPTNextWeb/NextChat/blob/a029b4330b89f8f2d1258e46fa68ba87c998a745/app/mcp/actions.ts#L366-L374", "sha": "a029b4330b89f8f2d1258e46fa68ba87c998a745"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "createSign", "code": "function createSign(apiUrl: string, timestamp: number, nonceStr: string) {\n const method = 'GET'\n const signStr = `${method}\\n${apiUrl}\\n${timestamp}\\n${nonceStr}\\n\\n`\n const cert = WechatPaySpec.privateKey\n const sign = crypto.createSign('RSA-SHA256')\n sign.update(signStr)\n return sign.sign(cert, 'base64')\n}", "docstring": "/**\n * Sign a wechat query request\n */", "url": "https://github.com/zuoFeng59556/chatGPT/blob/8db2578bd0471aa3c424a290b6490d6f042bc90a/cloudFunction/wechat-order-query.ts#L66-L73", "sha": "8db2578bd0471aa3c424a290b6490d6f042bc90a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Graphic3D.drawCameraFrustum", "code": "public drawCameraFrustum(camera: Camera3D, color: Color = Color.COLOR_WHITE) {\n if (camera.type == CameraType.perspective) {\n let y = Math.tan(camera.fov / 2 * DEGREES_TO_RADIANS);\n let x = y * camera.aspect;\n let worldMatrix = camera.transform._worldMatrix;\n\n let f0 = worldMatrix.transformVector(new Vector3(-x, -y, 1));\n let f1 = worldMatrix.transformVector(new Vector3(-x, y, 1));\n let f2 = worldMatrix.transformVector(new Vector3(x, -y, 1));\n let f3 = worldMatrix.transformVector(new Vector3(x, y, 1));\n\n let far = camera.far;\n let near = camera.near;\n let pos = camera.transform.worldPosition;\n\n let farLB = new Vector3().copyFrom(f0).multiplyScalar(far).add(pos);\n let farLT = new Vector3().copyFrom(f1).multiplyScalar(far).add(pos);\n let farRB = new Vector3().copyFrom(f2).multiplyScalar(far).add(pos);\n let farRT = new Vector3().copyFrom(f3).multiplyScalar(far).add(pos);\n\n let nearLB = new Vector3().copyFrom(f0).multiplyScalar(near).add(pos);\n let nearLT = new Vector3().copyFrom(f1).multiplyScalar(near).add(pos);\n let nearRB = new Vector3().copyFrom(f2).multiplyScalar(near).add(pos);\n let nearRT = new Vector3().copyFrom(f3).multiplyScalar(near).add(pos);\n\n let custom 
= this.createCustomShape(`CameraFrustum_${camera.object3D.instanceID}`);\n custom.buildLines([nearLT, farLT], color);\n custom.buildLines([nearLB, farLB], color);\n custom.buildLines([nearRT, farRT], color);\n custom.buildLines([nearRB, farRB], color);\n custom.buildLines([farLT, farRT, farRB, farLB, farLT], color);\n custom.buildLines([nearLT, nearRT, nearRB, nearLB, nearLT], color);\n } else if (camera.type == CameraType.ortho) {\n camera.viewPort;\n camera.viewPort.height;\n let worldMatrix = camera.transform.worldMatrix;\n let farLT = worldMatrix.transformVector(new Vector3(camera.viewPort.width * -0.5, camera.viewPort.height * 0.5, camera.far));\n let farLB = worldMatrix.transformVector(new Vector3(camera.viewPort.width * -0.5, camera.viewPort.height * -0.5, camera.far));\n let farRT = worldMatrix.transformVector(new Vector3(camera.viewPort.width * 0.5, camera.viewPort.height * 0.5, camera.far));\n let farRB = worldMatrix.transformVector(new Vector3(camera.viewPort.width * 0.5, camera.viewPort.height * -0.5, camera.far));\n\n let nearLT = worldMatrix.transformVector(new Vector3(camera.viewPort.width * -0.5, camera.viewPort.height * 0.5, camera.near));\n let nearLB = worldMatrix.transformVector(new Vector3(camera.viewPort.width * -0.5, camera.viewPort.height * -0.5, camera.near));\n let nearRT = worldMatrix.transformVector(new Vector3(camera.viewPort.width * 0.5, camera.viewPort.height * 0.5, camera.near));\n let nearRB = worldMatrix.transformVector(new Vector3(camera.viewPort.width * 0.5, camera.viewPort.height * -0.5, camera.near));\n\n let custom = this.createCustomShape(`CameraFrustum_${camera.object3D.instanceID}`);\n custom.buildLines([nearLT, farLT], color);\n custom.buildLines([nearLB, farLB], color);\n custom.buildLines([nearRT, farRT], color);\n custom.buildLines([nearRB, farRB], color);\n custom.buildLines([farLT, farRT, farRB, farLB, farLT], color);\n custom.buildLines([nearLT, nearRT, nearRB, nearLB, nearLT], color);\n }\n }", "docstring": "/**\n * Draw the camera cone\n * @param camera The camera to display the cone\n * @param color The color of the camera cone\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/packages/graphic/renderer/Graphic3DRender.ts#L358-L412", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Generic6DofSpringConstraint.enableSpring", "code": "public enableSpring(index: number, onOff: boolean): void {\n if (this._constraint) {\n this._constraint.enableSpring(index, onOff);\n } else {\n this._springParams.push({ index, onOff });\n }\n }", "docstring": "/**\n * 启用或禁用弹簧功能。\n * @param index 弹簧的索引\n * @param onOff 是否启用\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/packages/physics/constraint/Generic6DofSpringConstraint.ts#L70-L76", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PhysicsDragger.enable", "code": "public set enable(value: boolean) {\n if (this._enable === value) return;\n this._enable = value;\n value ? 
this.registerEvents() : this.unregisterEvents();\n }", "docstring": "/**\n * 是否启用拖拽功能\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/packages/physics/utils/PhysicsDragger.ts#L29-L33", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CameraControllerBase.target", "code": "public set target(val: Object3D | null) {\n if (this._target == val) return;\n this._target = val;\n }", "docstring": "/**\n *\n * Set the control object3D\n * @param val Object3D\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/components/controller/CameraControllerBase.ts#L41-L44", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LightBase.lightColor", "code": "public get lightColor(): Color {\n return this.lightData.lightColor;\n }", "docstring": "/**\n * Get light source color\n * @return Color\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/components/lights/LightBase.ts#L183-L185", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SpotLight.radius", "code": "public get radius(): number {\n return this.lightData.radius as number;\n }", "docstring": "/**\n *\n * Get the radius of the light source\n * @return number\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/components/lights/SpotLight.ts#L71-L73", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Object3D.notifyChange", "code": "public notifyChange(): void {\n this.transform.notifyChange();\n }", "docstring": "/**\n * Notify transformation attribute updates\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/core/entities/Object3D.ts#L328-L330", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Object3D.rotationY", "code": "public set rotationY(value: number) {\n this.transform.rotationY = value;\n }", "docstring": "/**\n *\n * Set the y rotation relative to the local coordinates of the parent container.\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/core/entities/Object3D.ts#L460-L462", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RenderShaderPass.topology", "code": "public get topology(): GPUPrimitiveTopology {\n return this.shaderState.topology;\n }", "docstring": "/**\n * Primitive topology\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/gfx/graphics/webGpu/shader/RenderShaderPass.ts#L214-L216", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RendererJob.pause", "code": "public pause() {\n this.pauseRender = true;\n }", "docstring": "/**\n * pause render task\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/gfx/renderJob/jobs/RendererJob.ts#L159-L161", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", 
"dataset": "github_2023", "owner": "", "lang": "", "func_name": "UnLitTexArrayMaterial.baseColor", "code": "public get baseColor() {\n return this.shader.getUniformColor(\"baseColor\");\n }", "docstring": "/**\n * get base color (tint color)\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/materials/UnLitTexArrayMaterial.ts#L43-L45", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Matrix3.prepend", "code": "public prepend(a: number, b: number, c: number, d: number, tx: number, ty: number): Matrix3 {\n let tx1 = this.tx;\n if (a != 1 || b != 0 || c != 0 || d != 1) {\n let a1 = this.a;\n let c1 = this.c;\n this.a = a1 * a + this.b * c;\n this.b = a1 * b + this.b * d;\n this.c = c1 * a + this.d * c;\n this.d = c1 * b + this.d * d;\n }\n this.tx = tx1 * a + this.ty * c + tx;\n this.ty = tx1 * b + this.ty * d + ty;\n return this;\n }", "docstring": "/**\n * get a front matrix by multiplication\n * @param a Multiply by a\n * @param b Multiply by b\n * @param c Multiply by c\n * @param d Multiply by d\n * @param tx Multiply by tx\n * @param ty Multiply by ty\n * @returns prematrix\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/math/Matrix3.ts#L287-L300", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Matrix4.decompose", "code": "public decompose(orientationStyle: string = 'eulerAngles', target?: Vector3[]): Vector3[] {\n let q: Quaternion = Quaternion.CALCULATION_QUATERNION;\n let vec: Vector3[] = target ? target : Matrix4._prs;\n this.copyRawDataTo(Matrix4.decomposeRawData);\n let mr = Matrix4.decomposeRawData;\n\n let pos: Vector3 = vec[0];\n pos.x = mr[12];\n pos.y = mr[13];\n pos.z = mr[14];\n mr[12] = 0;\n mr[13] = 0;\n mr[14] = 0;\n\n let scale: Vector3 = vec[2];\n\n scale.x = Math.sqrt(mr[0] * mr[0] + mr[1] * mr[1] + mr[2] * mr[2]);\n scale.y = Math.sqrt(mr[4] * mr[4] + mr[5] * mr[5] + mr[6] * mr[6]);\n scale.z = Math.sqrt(mr[8] * mr[8] + mr[9] * mr[9] + mr[10] * mr[10]);\n\n if (mr[0] * (mr[5] * mr[10] - mr[6] * mr[9])\n - mr[1] * (mr[4] * mr[10] - mr[6] * mr[8])\n + mr[2] * (mr[4] * mr[9] - mr[5] * mr[8]) < 0) {\n scale.z = -scale.z;\n }\n\n mr[0] /= scale.x;\n mr[1] /= scale.x;\n mr[2] /= scale.x;\n mr[4] /= scale.y;\n mr[5] /= scale.y;\n mr[6] /= scale.y;\n mr[8] /= scale.z;\n mr[9] /= scale.z;\n mr[10] /= scale.z;\n\n let rot = vec[1];\n let tr: number;\n switch (orientationStyle) {\n case Orientation3D.AXIS_ANGLE:\n rot.w = Math.acos((mr[0] + mr[5] + mr[10] - 1) / 2);\n\n let len: number = Math.sqrt((mr[6] - mr[9]) * (mr[6] - mr[9]) + (mr[8] - mr[2]) * (mr[8] - mr[2]) + (mr[1] - mr[4]) * (mr[1] - mr[4]));\n rot.x = (mr[6] - mr[9]) / len;\n rot.y = (mr[8] - mr[2]) / len;\n rot.z = (mr[1] - mr[4]) / len;\n\n break;\n case Orientation3D.QUATERNION:\n tr = mr[0] + mr[5] + mr[10];\n\n if (tr > 0) {\n rot.w = Math.sqrt(1 + tr) / 2;\n\n rot.x = (mr[6] - mr[9]) / (4 * rot.w);\n rot.y = (mr[8] - mr[2]) / (4 * rot.w);\n rot.z = (mr[1] - mr[4]) / (4 * rot.w);\n } else if (mr[0] > mr[5] && mr[0] > mr[10]) {\n rot.x = Math.sqrt(1 + mr[0] - mr[5] - mr[10]) / 2;\n\n rot.w = (mr[6] - mr[9]) / (4 * rot.x);\n rot.y = (mr[1] + mr[4]) / (4 * rot.x);\n rot.z = (mr[8] + mr[2]) / (4 * rot.x);\n } else if (mr[5] > mr[10]) {\n rot.y = Math.sqrt(1 + mr[5] - mr[0] - mr[10]) / 2;\n\n rot.x = (mr[1] + mr[4]) / (4 * rot.y);\n 
rot.w = (mr[8] - mr[2]) / (4 * rot.y);\n rot.z = (mr[6] + mr[9]) / (4 * rot.y);\n } else {\n rot.z = Math.sqrt(1 + mr[10] - mr[0] - mr[5]) / 2;\n\n rot.x = (mr[8] + mr[2]) / (4 * rot.z);\n rot.y = (mr[6] + mr[9]) / (4 * rot.z);\n rot.w = (mr[1] - mr[4]) / (4 * rot.z);\n }\n\n break;\n case Orientation3D.EULER_ANGLES:\n tr = mr[0] + mr[5] + mr[10];\n\n if (tr > 0) {\n q.w = Math.sqrt(1 + tr) / 2;\n\n q.x = (mr[6] - mr[9]) / (4 * q.w);\n q.y = (mr[8] - mr[2]) / (4 * q.w);\n q.z = (mr[1] - mr[4]) / (4 * q.w);\n } else if (mr[0] > mr[5] && mr[0] > mr[10]) {\n q.x = Math.sqrt(1 + mr[0] - mr[5] - mr[10]) / 2;\n\n q.w = (mr[6] - mr[9]) / (4 * q.x);\n q.y = (mr[1] + mr[4]) / (4 * q.x);\n q.z = (mr[8] + mr[2]) / (4 * q.x);\n } else if (mr[5] > mr[10]) {\n rot.y = Math.sqrt(1 + mr[5] - mr[0] - mr[10]) / 2;\n\n q.x = (mr[1] + mr[4]) / (4 * q.y);\n q.w = (mr[8] - mr[2]) / (4 * q.y);\n q.z = (mr[6] + mr[9]) / (4 * q.y);\n } else {\n q.z = Math.sqrt(1 + mr[10] - mr[0] - mr[5]) / 2;\n\n q.x = (mr[8] + mr[2]) / (4 * q.z);\n q.y = (mr[6] + mr[9]) / (4 * q.z);\n q.w = (mr[1] - mr[4]) / (4 * q.z);\n }\n q.getEulerAngles(rot);\n\n break;\n }\n\n vec[0] = pos;\n vec[1] = rot;\n vec[2] = scale;\n\n return vec;\n }", "docstring": "/**\n * Decompose the current matrix\n * @param orientationStyle The default decomposition type is Orientation3D.EULER_ANGLES\n * @see Orientation3D.AXIS_ANGLE\n * @see Orientation3D.EULER_ANGLES\n * @see Orientation3D.QUATERNION\n * @returns Vector3[3] pos rot scale\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/math/Matrix4.ts#L1437-L1553", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Matrix4.makeBasis", "code": "public makeBasis(xAxis: Vector3, yAxis: Vector3, zAxis: Vector3) {\n this.setElements(\n xAxis.x, yAxis.x, zAxis.x, 0,\n xAxis.y, yAxis.y, zAxis.y, 0,\n xAxis.z, yAxis.z, zAxis.z, 0,\n 0, 0, 0, 1\n );\n return this;\n }", "docstring": "/**\n * Generate the matrix according to the three axes\n * @param xAxis\n * @param yAxis\n * @param zAxis\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/math/Matrix4.ts#L2066-L2074", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Quaternion.identity", "code": "public static identity() {\n return Quaternion._zero;\n }", "docstring": "/**\n * Identity quaternion\n * @returns \n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/math/Quaternion.ts#L52-L54", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Ray.sqrDistToPoint", "code": "public sqrDistToPoint(P: Vector3): number {\n let v = this._dir;\n let w = P.subtract(this.origin);\n\n let c1 = dot(w, v);\n let c2 = dot(v, v);\n let b = c1 / c2;\n\n let Pb = this.getPoint(b);\n return sqrMagnitude(P.subtract(Pb));\n }", "docstring": "/**\n * Calculate the distance from a point\n * @param P Specify Point\n * @returns result\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/math/Ray.ts#L179-L189", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TextureCubeFaceData.generateMipmap", "code": "private generateMipmap(texture: Texture) 
{\n let mipmap: number = 1;\n while (mipmap < this._texture.mipmapCount) {\n this.generateMipmapAtLevel(mipmap, texture);\n mipmap++;\n }\n }", "docstring": "/**\n * @private generateMipmap\n * @param texture texture reference\n */", "url": "https://github.com/Orillusion/orillusion/blob/8887427f0a2e426a1cc75ef022c8649bcdd785b0/src/textures/TextureCubeFaceData.ts#L68-L74", "sha": "8887427f0a2e426a1cc75ef022c8649bcdd785b0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NDKCacheAdapterDexie.byTags", "code": "private byTags(filter: NDKFilter, subscription: NDKSubscription): boolean {\n const tagFilters = Object.entries(filter)\n .filter(([filter]) => filter.startsWith(\"#\") && filter.length === 2)\n .map(([filter, values]) => [filter[1], values]);\n if (tagFilters.length === 0) return false;\n\n // Go through all the tags (#e, #p, etc)\n for (const [tag, values] of tagFilters) {\n // Go throgh each value in the filter\n for (const value of values as string[]) {\n const tagValue = tag + value;\n\n // Get all events with this tag\n const eventIds = this.eventTags.getSet(tagValue);\n if (!eventIds) continue;\n\n // Go through each event that came back\n eventIds.forEach((id) => {\n const event = this.events.get(id);\n if (!event) return;\n\n if (!filter.kinds || filter.kinds.includes(event.kind!)) {\n foundEvent(subscription, event, event.relay, filter);\n }\n });\n }\n }\n\n return true;\n }", "docstring": "/**\n * Searches by tags and optionally filters by tags\n */", "url": "https://github.com/nostr-dev-kit/ndk/blob/fdbe6e5934c431318f1f210fa9f797f40b53326c/ndk-cache-dexie/src/index.ts#L497-L526", "sha": "fdbe6e5934c431318f1f210fa9f797f40b53326c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NDKCashuToken.cleanProof", "code": "private cleanProof(proof: Proof): Proof {\n return {\n id: proof.id,\n amount: proof.amount,\n C: proof.C,\n secret: proof.secret\n };\n }", "docstring": "/**\n * Returns a minimal proof object with only essential properties\n */", "url": "https://github.com/nostr-dev-kit/ndk/blob/fdbe6e5934c431318f1f210fa9f797f40b53326c/ndk-wallet/src/wallets/cashu/token.ts#L88-L95", "sha": "fdbe6e5934c431318f1f210fa9f797f40b53326c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LevelService.findAll", "code": "findAll() {\n return this.levelRepository.find();\n }", "docstring": "/* 不分页查询所有家长引导 */", "url": "https://github.com/yinMrsir/chunyu-cms/blob/7177fcbb75b941cbf7556e391c05b37e992c1476/Nest-server/src/modules/basic/level/level.service.ts#L40-L42", "sha": "7177fcbb75b941cbf7556e391c05b37e992c1476"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SolidityParser.ruleNames", "code": "public get ruleNames(): string[] { return SolidityParser.ruleNames; }", "docstring": "// @Override", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/prettier-plugin-solidity/node_modules/@solidity-parser/parser/src/antlr/SolidityParser.ts#L349-L349", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SolidityParser.customErrorDefinition", "code": "public customErrorDefinition(): CustomErrorDefinitionContext {\n\t\tlet _localctx: CustomErrorDefinitionContext = new CustomErrorDefinitionContext(this._ctx, this.state);\n\t\tthis.enterRule(_localctx, 30, 
SolidityParser.RULE_customErrorDefinition);\n\t\ttry {\n\t\t\tthis.enterOuterAlt(_localctx, 1);\n\t\t\t{\n\t\t\tthis.state = 364;\n\t\t\tthis.match(SolidityParser.T__24);\n\t\t\tthis.state = 365;\n\t\t\tthis.identifier();\n\t\t\tthis.state = 366;\n\t\t\tthis.parameterList();\n\t\t\tthis.state = 367;\n\t\t\tthis.match(SolidityParser.T__1);\n\t\t\t}\n\t\t}\n\t\tcatch (re) {\n\t\t\tif (re instanceof RecognitionException) {\n\t\t\t\t_localctx.exception = re;\n\t\t\t\tthis._errHandler.reportError(this, re);\n\t\t\t\tthis._errHandler.recover(this, re);\n\t\t\t} else {\n\t\t\t\tthrow re;\n\t\t\t}\n\t\t}\n\t\tfinally {\n\t\t\tthis.exitRule();\n\t\t}\n\t\treturn _localctx;\n\t}", "docstring": "// @RuleVersion(0)", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/prettier-plugin-solidity/node_modules/@solidity-parser/parser/src/antlr/SolidityParser.ts#L1288-L1317", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SolidityParser.assemblyFunctionDefinition", "code": "public assemblyFunctionDefinition(): AssemblyFunctionDefinitionContext {\n\t\tlet _localctx: AssemblyFunctionDefinitionContext = new AssemblyFunctionDefinitionContext(this._ctx, this.state);\n\t\tthis.enterRule(_localctx, 176, SolidityParser.RULE_assemblyFunctionDefinition);\n\t\tlet _la: number;\n\t\ttry {\n\t\t\tthis.enterOuterAlt(_localctx, 1);\n\t\t\t{\n\t\t\tthis.state = 1061;\n\t\t\tthis.match(SolidityParser.T__29);\n\t\t\tthis.state = 1062;\n\t\t\tthis.identifier();\n\t\t\tthis.state = 1063;\n\t\t\tthis.match(SolidityParser.T__22);\n\t\t\tthis.state = 1065;\n\t\t\tthis._errHandler.sync(this);\n\t\t\t_la = this._input.LA(1);\n\t\t\tif (_la === SolidityParser.T__13 || _la === SolidityParser.T__24 || ((((_la - 36)) & ~0x1F) === 0 && ((1 << (_la - 36)) & ((1 << (SolidityParser.T__35 - 36)) | (1 << (SolidityParser.T__41 - 36)) | (1 << (SolidityParser.T__53 - 36)))) !== 0) || ((((_la - 95)) & ~0x1F) === 0 && ((1 << (_la - 95)) & ((1 << (SolidityParser.T__94 - 95)) | (1 << (SolidityParser.LeaveKeyword - 95)) | (1 << (SolidityParser.PayableKeyword - 95)) | (1 << (SolidityParser.ConstructorKeyword - 95)))) !== 0) || _la === SolidityParser.ReceiveKeyword || _la === SolidityParser.Identifier) {\n\t\t\t\t{\n\t\t\t\tthis.state = 1064;\n\t\t\t\tthis.assemblyIdentifierList();\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tthis.state = 1067;\n\t\t\tthis.match(SolidityParser.T__23);\n\t\t\tthis.state = 1069;\n\t\t\tthis._errHandler.sync(this);\n\t\t\t_la = this._input.LA(1);\n\t\t\tif (_la === SolidityParser.T__93) {\n\t\t\t\t{\n\t\t\t\tthis.state = 1068;\n\t\t\t\tthis.assemblyFunctionReturns();\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tthis.state = 1071;\n\t\t\tthis.assemblyBlock();\n\t\t\t}\n\t\t}\n\t\tcatch (re) {\n\t\t\tif (re instanceof RecognitionException) {\n\t\t\t\t_localctx.exception = re;\n\t\t\t\tthis._errHandler.reportError(this, re);\n\t\t\t\tthis._errHandler.recover(this, re);\n\t\t\t} else {\n\t\t\t\tthrow re;\n\t\t\t}\n\t\t}\n\t\tfinally {\n\t\t\tthis.exitRule();\n\t\t}\n\t\treturn _localctx;\n\t}", "docstring": "// @RuleVersion(0)", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/prettier-plugin-solidity/node_modules/@solidity-parser/parser/src/antlr/SolidityParser.ts#L5546-L5598", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": 
"", "func_name": "IdentifierListContext.accept", "code": "public accept(visitor: SolidityVisitor): Result {\n\t\tif (visitor.visitIdentifierList) {\n\t\t\treturn visitor.visitIdentifierList(this);\n\t\t} else {\n\t\t\treturn visitor.visitChildren(this);\n\t\t}\n\t}", "docstring": "// @Override", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/prettier-plugin-solidity/node_modules/@solidity-parser/parser/src/antlr/SolidityParser.ts#L9586-L9592", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "InlineAssemblyStatementContext.enterRule", "code": "public enterRule(listener: SolidityListener): void {\n\t\tif (listener.enterInlineAssemblyStatement) {\n\t\t\tlistener.enterInlineAssemblyStatement(this);\n\t\t}\n\t}", "docstring": "// @Override", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/@solidity-parser/parser/src/antlr/SolidityParser.ts#L9110-L9114", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AssemblyCaseContext.accept", "code": "public accept(visitor: SolidityVisitor): Result {\n\t\tif (visitor.visitAssemblyCase) {\n\t\t\treturn visitor.visitAssemblyCase(this);\n\t\t} else {\n\t\t\treturn visitor.visitChildren(this);\n\t\t}\n\t}", "docstring": "// @Override", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/@solidity-parser/parser/src/antlr/SolidityParser.ts#L10330-L10336", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Address.isZero", "code": "isZero(): boolean {\n return this.equals(Address.zero())\n }", "docstring": "/**\n * Is address zero.\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/ethereumjs-util/src/address.ts#L88-L90", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HardhatModule._hardhatSetPrevRandaoParams", "code": "private _hardhatSetPrevRandaoParams(params: any[]): [Buffer] {\n // using rpcHash because it's also 32 bytes long\n return validateParams(params, rpcHash);\n }", "docstring": "// hardhat_setPrevRandao", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/hardhat/src/internal/hardhat-network/provider/modules/hardhat.ts#L411-L414", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "B2B_G", "code": "function B2B_G(\n v: Uint32Array,\n mw: Uint32Array,\n a: number,\n b: number,\n c: number,\n d: number,\n ix: number,\n iy: number\n) {\n const x0 = mw[ix]\n const x1 = mw[ix + 1]\n const y0 = mw[iy]\n const y1 = mw[iy + 1]\n\n ADD64AA(v, a, b) // v[a,a+1] += v[b,b+1] ... in JS we must store a uint64 as two uint32s\n ADD64AC(v, a, x0, x1) // v[a, a+1] += x ... 
x0 is the low 32 bits of x, x1 is the high 32 bits\n\n // v[d,d+1] = (v[d,d+1] xor v[a,a+1]) rotated to the right by 32 bits\n let xor0 = v[d] ^ v[a]\n let xor1 = v[d + 1] ^ v[a + 1]\n v[d] = xor1\n v[d + 1] = xor0\n\n ADD64AA(v, c, d)\n\n // v[b,b+1] = (v[b,b+1] xor v[c,c+1]) rotated right by 24 bits\n xor0 = v[b] ^ v[c]\n xor1 = v[b + 1] ^ v[c + 1]\n v[b] = (xor0 >>> 24) ^ (xor1 << 8)\n v[b + 1] = (xor1 >>> 24) ^ (xor0 << 8)\n\n ADD64AA(v, a, b)\n ADD64AC(v, a, y0, y1)\n\n // v[d,d+1] = (v[d,d+1] xor v[a,a+1]) rotated right by 16 bits\n xor0 = v[d] ^ v[a]\n xor1 = v[d + 1] ^ v[a + 1]\n v[d] = (xor0 >>> 16) ^ (xor1 << 16)\n v[d + 1] = (xor1 >>> 16) ^ (xor0 << 16)\n\n ADD64AA(v, c, d)\n\n // v[b,b+1] = (v[b,b+1] xor v[c,c+1]) rotated right by 63 bits\n xor0 = v[b] ^ v[c]\n xor1 = v[b + 1] ^ v[c + 1]\n v[b] = (xor1 >>> 31) ^ (xor0 << 1)\n v[b + 1] = (xor0 >>> 31) ^ (xor1 << 1)\n}", "docstring": "// G Mixing function", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/@nomicfoundation/ethereumjs-evm/src/precompiles/09-blake2f.ts#L48-L96", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "safeSlice", "code": "function safeSlice(input: Uint8Array, start: number, end: number) {\n if (end > input.length) {\n throw new Error('invalid RLP (safeSlice): end slice of Uint8Array out-of-bounds')\n }\n return input.slice(start, end)\n}", "docstring": "/**\n * Slices a Uint8Array, throws if the slice goes out-of-bounds of the Uint8Array.\n * E.g. `safeSlice(hexToBytes('aa'), 1, 2)` will throw.\n * @param input\n * @param start\n * @param end\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/@nomicfoundation/ethereumjs-tx/node_modules/@nomicfoundation/ethereumjs-rlp/src/index.ts#L40-L45", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "updateMarkCount", "code": "const updateMarkCount = (markList: MarkItemType[]) => {\n markList.forEach((mark: MarkItemType) => {\n const { msgId, markType, markCount } = mark\n\n const msgItem = currentMessageMap.value?.get(msgId)\n if (msgItem) {\n if (markType === MarkEnum.LIKE) {\n msgItem.message.messageMark.likeCount = markCount\n } else if (markType === MarkEnum.DISLIKE) {\n msgItem.message.messageMark.dislikeCount = markCount\n }\n }\n })\n }", "docstring": "// 更新点赞、举报数", "url": "https://github.com/Evansy/MallChatWeb/blob/aab69ea2f97aa0b94a3fcbff06995c16c2a8bf5c/src/stores/chat.ts#L375-L388", "sha": "aab69ea2f97aa0b94a3fcbff06995c16c2a8bf5c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ChatgptService.getBill", "code": "async getBill(): Promise {\n // throw new Error('Method not implemented.');\n return output({ code: 0, data: null });\n }", "docstring": "/**\n * get account bill token\n */", "url": "https://github.com/zhpd/chatgpt-plus/blob/6b076be24ec297e18daac0740d5a9ab82f6e60ed/service/src/modules/chatgpt/chatgpt.service.ts#L147-L150", "sha": "6b076be24ec297e18daac0740d5a9ab82f6e60ed"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "csrf", "code": "const csrf = async () => {\n /**\n * 1,验证referer\n * 2,验证origin\n */\n};", "docstring": "// import { ParameterizedContext } from 'koa';", "url": 
"https://github.com/galaxy-s10/billd-live-server/blob/5676de99286a75a3e817fed1a6434932b8bbf7d4/src/middleware/csrf.middleware.ts#L6-L11", "sha": "5676de99286a75a3e817fed1a6434932b8bbf7d4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ConceptsService.getConceptModels", "code": "public static getConceptModels(\n namespace: string,\n conceptName: string,\n ): CancelablePromise> {\n return __request(OpenAPI, {\n method: 'GET',\n url: '/api/v1/concepts/{namespace}/{concept_name}/model',\n path: {\n 'namespace': namespace,\n 'concept_name': conceptName,\n },\n errors: {\n 422: `Validation Error`,\n },\n });\n }", "docstring": "/**\n * Get Concept Models\n * Get a concept model from a database.\n * @param namespace\n * @param conceptName\n * @returns ConceptModelInfo Successful Response\n * @throws ApiError\n */", "url": "https://github.com/databricks/lilac/blob/b7d92b775fe5dc813283ab3ef9e62007f728c3b9/web/lib/fastapi_client/services/ConceptsService.ts#L204-L219", "sha": "b7d92b775fe5dc813283ab3ef9e62007f728c3b9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LikeC4ViewModel.node", "code": "public node(node: M['NodeOrId']): NodeModel {\n const nodeId = getId(node)\n return nonNullable(this.#nodes.get(nodeId), `Node ${nodeId} not found in view ${this.$view.id}`)\n }", "docstring": "/**\n * Get node by id.\n * @throws Error if node is not found.\n */", "url": "https://github.com/likec4/likec4/blob/f420421cb95b7fb5d40f6556e138394b2a7a5ad1/packages/core/src/model/view/LikeC4ViewModel.ts#L130-L133", "sha": "f420421cb95b7fb5d40f6556e138394b2a7a5ad1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "printEdge", "code": "const printEdge = (edge: ComputedEdge): CompositeGeneratorNode => {\n return new CompositeGeneratorNode().append(\n names.get(edge.source),\n ' -.',\n edge.label ? ' \"' + edge.label.replaceAll('\\n', '\\\\n') + '\" .-' : '-',\n '> ',\n names.get(edge.target)\n )\n }", "docstring": "// return `${names.get(edge.source)} -> ${names.get(edge.target)}${edge.label ? 
': ' + edge.label : ''}`", "url": "https://github.com/likec4/likec4/blob/f420421cb95b7fb5d40f6556e138394b2a7a5ad1/packages/generators/src/mmd/generate-mmd.ts#L66-L74", "sha": "f420421cb95b7fb5d40f6556e138394b2a7a5ad1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LikeC4ScopeProvider.uniqueDescedants", "code": "private uniqueDescedants(of: () => ast.Element | undefined): Stream {\n return new StreamImpl(\n () => {\n const element = of()\n const fqn = element && this.fqnIndex.getFqn(element)\n if (fqn) {\n return this.fqnIndex.uniqueDescedants(fqn).iterator()\n }\n return null\n },\n iterator => {\n if (iterator) {\n return iterator.next()\n }\n return DONE_RESULT\n },\n )\n }", "docstring": "// we need lazy resolving here", "url": "https://github.com/likec4/likec4/blob/f420421cb95b7fb5d40f6556e138394b2a7a5ad1/packages/language-server/src/references/scope-provider.ts#L42-L59", "sha": "f420421cb95b7fb5d40f6556e138394b2a7a5ad1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MmdWasmRuntime.camera", "code": "public get camera(): Nullable {\n return this._camera;\n }", "docstring": "/**\n * MMD camera\n */", "url": "https://github.com/noname0310/babylon-mmd/blob/b93b7f2dcf3fa3d72eecbb228d1c759b3d678357/src/Runtime/Optimized/mmdWasmRuntime.ts#L1107-L1109", "sha": "b93b7f2dcf3fa3d72eecbb228d1c759b3d678357"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MmdWasmRuntimeModelAnimation.Create", "code": "public static Create(animation: MmdWasmAnimation, model: MmdWasmModel, onDispose: () => void, retargetingMap?: { [key: string]: string }, logger?: ILogger): MmdWasmRuntimeModelAnimation {\n const wasmInstance = animation._poolWrapper.instance;\n const animationPool = animation._poolWrapper.pool;\n\n const skeleton = model.skeleton;\n const bones = skeleton.bones;\n\n const boneIndexMap = new Map();\n if (retargetingMap === undefined) {\n for (let i = 0; i < bones.length; ++i) {\n boneIndexMap.set(bones[i].name, i);\n }\n } else {\n for (let i = 0; i < bones.length; ++i) {\n boneIndexMap.set(retargetingMap[bones[i].name] ?? 
bones[i].name, i);\n }\n }\n\n const boneBindIndexMapPtr = animationPool.createBoneBindIndexMap(animation.ptr);\n const boneBindIndexMap = wasmInstance.createTypedArray(Int32Array, boneBindIndexMapPtr, animation.boneTracks.length);\n {\n const boneTracks = animation.boneTracks;\n const boneBindIndexMapArray = boneBindIndexMap.array;\n for (let i = 0; i < boneTracks.length; ++i) {\n const boneTrack = boneTracks[i];\n const boneIndex = boneIndexMap.get(boneTrack.name);\n if (boneIndex === undefined) {\n logger?.warn(`Binding failed: bone ${boneTrack.name} not found`);\n boneBindIndexMapArray[i] = -1;\n } else {\n boneBindIndexMapArray[i] = boneIndex;\n }\n }\n }\n\n const movableBoneBindIndexMapPtr = animationPool.createMovableBoneBindIndexMap(animation.ptr);\n const movableBoneBindIndexMap = wasmInstance.createTypedArray(Int32Array, movableBoneBindIndexMapPtr, animation.movableBoneTracks.length);\n {\n const movableBoneBindIndexMapArray = movableBoneBindIndexMap.array;\n const movableBoneTracks = animation.movableBoneTracks;\n for (let i = 0; i < movableBoneTracks.length; ++i) {\n const movableBoneTrack = movableBoneTracks[i];\n const boneIndex = boneIndexMap.get(movableBoneTrack.name);\n if (boneIndex === undefined) {\n logger?.warn(`Binding failed: bone ${movableBoneTrack.name} not found`);\n movableBoneBindIndexMapArray[i] = -1;\n } else {\n movableBoneBindIndexMapArray[i] = boneIndex;\n }\n }\n }\n\n const morphBindIndexMap: Nullable[] = new Array(animation.morphTracks.length);\n const morphController = model.morph;\n const morphTracks = animation.morphTracks;\n for (let i = 0; i < morphTracks.length; ++i) {\n const morphTrack = morphTracks[i];\n const mappedName = retargetingMap?.[morphTrack.name] ?? morphTrack.name;\n const morphIndices = morphController.getMorphIndices(mappedName);\n if (morphIndices === undefined) {\n logger?.warn(`Binding failed: morph ${mappedName} not found`);\n morphBindIndexMap[i] = null;\n } else {\n morphBindIndexMap[i] = morphIndices;\n }\n }\n\n const morphLengthBufferPtr = animationPool.allocateLengthsBuffer(morphTracks.length);\n const morphLengthBuffer = wasmInstance.createTypedArray(Uint32Array, morphLengthBufferPtr, morphTracks.length);\n {\n const morphLengthBufferArray = morphLengthBuffer.array;\n const wasmMorphIndexMap = morphController.wasmMorphIndexMap;\n for (let i = 0; i < morphTracks.length; ++i) {\n let indicesCount = 0;\n const morphIndices = morphBindIndexMap[i];\n if (morphIndices !== null) {\n for (let j = 0; j < morphIndices.length; ++j) {\n const remappedIndex = wasmMorphIndexMap[morphIndices[j]];\n if (remappedIndex !== undefined && remappedIndex !== -1) indicesCount += 1;\n }\n }\n morphLengthBufferArray[i] = indicesCount;\n }\n }\n const morphBindIndexMapPtr = animationPool.createMorphBindIndexMap(animation.ptr, morphLengthBufferPtr);\n {\n const wasmMorphIndexMap = morphController.wasmMorphIndexMap;\n for (let i = 0; i < morphTracks.length; ++i) {\n const nthMorphIndicesPtr = animationPool.getNthMorphBindIndexMap(morphBindIndexMapPtr, i);\n const nthMorphIndices = wasmInstance.createTypedArray(Int32Array, nthMorphIndicesPtr, morphLengthBuffer.array[i]).array;\n\n let indicesCount = 0;\n const morphIndices = morphBindIndexMap[i];\n if (morphIndices !== null) {\n for (let j = 0; j < morphIndices.length; ++j) {\n const remappedIndex = wasmMorphIndexMap[morphIndices[j]];\n if (remappedIndex !== undefined && remappedIndex !== -1) {\n nthMorphIndices[indicesCount] = remappedIndex;\n indicesCount += 1;\n }\n }\n }\n }\n }\n 
animationPool.deallocateLengthsBuffer(morphLengthBufferPtr, morphTracks.length);\n\n const ikSolverBindIndexMapPtr = animationPool.createIkSolverBindIndexMap(animation.ptr);\n const ikSolverBindIndexMap = wasmInstance.createTypedArray(Int32Array, ikSolverBindIndexMapPtr, animation.propertyTrack.ikBoneNames.length);\n {\n const ikSolverBindIndexMapArray = ikSolverBindIndexMap.array;\n const runtimeBones = model.runtimeBones;\n const propertyTrackIkBoneNames = animation.propertyTrack.ikBoneNames;\n for (let i = 0; i < propertyTrackIkBoneNames.length; ++i) {\n const ikBoneName = propertyTrackIkBoneNames[i];\n const ikBoneIndex = boneIndexMap.get(ikBoneName);\n if (ikBoneIndex === undefined) {\n logger?.warn(`Binding failed: IK bone ${ikBoneName} not found`);\n ikSolverBindIndexMapArray[i] = -1;\n } else {\n const ikSolverIndex = runtimeBones[ikBoneIndex].ikSolverIndex;\n if (ikSolverIndex === -1) {\n logger?.warn(`Binding failed: IK solver for bone ${ikBoneName} not found`);\n ikSolverBindIndexMapArray[i] = -1;\n } else {\n ikSolverBindIndexMapArray[i] = ikSolverIndex;\n }\n }\n }\n }\n\n const runtimeAnimationPtr = animationPool.createRuntimeAnimation(\n animation.ptr,\n boneBindIndexMapPtr,\n movableBoneBindIndexMapPtr,\n morphBindIndexMapPtr,\n ikSolverBindIndexMapPtr\n );\n\n return new MmdWasmRuntimeModelAnimation(\n runtimeAnimationPtr,\n model.ptr,\n animation,\n boneBindIndexMap,\n movableBoneBindIndexMap,\n morphController,\n morphBindIndexMap,\n model.mesh.metadata.meshes,\n ikSolverBindIndexMap,\n model.mesh.metadata.materials,\n onDispose\n );\n }", "docstring": "/**\n * @internal\n * Bind animation to model and prepare material for morph animation\n * @param animation Animation to bind\n * @param model Bind target\n * @param onDispose Callback when this instance is disposed\n * @param retargetingMap Animation bone name to model bone name map\n * @param logger Logger\n * @return MmdRuntimeModelAnimation instance\n */", "url": "https://github.com/noname0310/babylon-mmd/blob/b93b7f2dcf3fa3d72eecbb228d1c759b3d678357/src/Runtime/Optimized/Animation/mmdWasmRuntimeModelAnimation.ts#L236-L387", "sha": "b93b7f2dcf3fa3d72eecbb228d1c759b3d678357"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AuthService.verifyToken", "code": "verifyToken(token: string): JWTObject {\n return jwt.verify(token, this.configuration.secret) as JWTObject;\n }", "docstring": "// eslint-disable-next-line @typescript-eslint/no-unused-vars", "url": "https://github.com/linagora/twake-drive/blob/fdc3630d21e0a5ce28b0be5cc8191cd0d3e1923b/tdrive/backend/node/src/core/platform/services/auth/service.ts#L23-L25", "sha": "fdc3630d21e0a5ce28b0be5cc8191cd0d3e1923b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "entriesApi", "code": "function entriesApi(\n items: DataTransferItemList,\n cb: (files?: File[], paths?: string[]) => void,\n ) {\n const fd: any[] = [],\n files: any[] = [],\n rootPromises: any[] = [];\n\n function readEntries(entry: any, reader: any, oldEntries: any, cb: any) {\n const dirReader = reader || entry.createReader();\n dirReader.readEntries(function (entries: any) {\n const newEntries = oldEntries ? 
oldEntries.concat(entries) : entries;\n if (entries.length) {\n setTimeout(readEntries.bind(null, entry, dirReader, newEntries, cb), 0);\n } else {\n cb(newEntries);\n }\n });\n }\n\n function readDirectory(entry: any, path: null | string, resolve: (v: any) => void) {\n if (!path) path = entry.name;\n readEntries(entry, 0, 0, function (entries: any[]) {\n const promises: Promise[] = [];\n entries.forEach(function (entry: any) {\n promises.push(\n new Promise(function (resolve) {\n if (entry.isFile) {\n entry.file(function (file: File) {\n const p = path + '/' + file.name;\n fd.push(file);\n files.push(p);\n if (files.length > 1000000) {\n return false;\n }\n resolve(true);\n }, resolve.bind(null, true));\n } else readDirectory(entry, path + '/' + entry.name, resolve);\n }),\n );\n });\n Promise.all(promises).then(resolve.bind(null, true));\n });\n }\n\n let timeBegin = Date.now();\n [].slice.call(items).forEach(function (entry: any) {\n entry = entry.webkitGetAsEntry();\n if (entry) {\n rootPromises.push(\n new Promise(function (resolve) {\n if (entry.isFile) {\n entry.file(function (file: File) {\n fd.push(file);\n files.push(file.name);\n if (files.length > 1000000) {\n return false;\n }\n resolve(true);\n }, resolve.bind(null, true));\n } else if (entry.isDirectory) {\n const timeToRead = Date.now();\n readDirectory(entry, null, resolve);\n }\n }),\n );\n }\n });\n\n if (files.length > 1000000) {\n return false;\n }\n\n timeBegin = Date.now();\n Promise.all(rootPromises).then(cb.bind(null, fd, files));\n }", "docstring": "// old drag and drop API implemented in Chrome 11+", "url": "https://github.com/linagora/twake-drive/blob/fdc3630d21e0a5ce28b0be5cc8191cd0d3e1923b/tdrive/frontend/src/app/components/uploads/file-tree-utils.ts#L80-L155", "sha": "fdc3630d21e0a5ce28b0be5cc8191cd0d3e1923b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Server.mount", "code": "public mount(path: string, other: Server) {\n hpx.hpxServerMount(this.serverId, path, other.serverId);\n }", "docstring": "/**\n * Defines a WebSocket route on the server.\n * @param {string} path - The route path.\n * @param {(wsClient: WebSocketClient) => any} callback -\n * The callback function to handle WebSocket connections.\n * \n * @example\n * asdasdasdasdasdsa;\n * \n */", "url": "https://github.com/HapticX/happyx/blob/20072a2b0d0c33cba350be66ff9c690871184444/bindings/node/src/index.ts#L372-L374", "sha": "20072a2b0d0c33cba350be66ff9c690871184444"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "QdrantClient.updateVectors", "code": "async updateVectors(\n collection_name: string,\n {\n wait = true,\n ordering,\n points,\n shard_key,\n }: {wait?: boolean; ordering?: SchemaFor<'WriteOrdering'>} & SchemaFor<'UpdateVectors'>,\n ) {\n const response = await this._openApiClient.points.updateVectors({\n collection_name,\n wait,\n ordering,\n points,\n shard_key,\n });\n return maybe(response.data.result).orThrow('Update vectors returned empty');\n }", "docstring": "/**\n * Update vectors\n * @param collection_name\n * @param {object} args\n * - wait: Await for the results to be processed.\n * - If `true`, result will be returned only when all changes are applied\n * - If `false`, result will be returned immediately after the confirmation of receiving.\n * - Default: `true`\n * - ordering: Define strategy for ordering of the points. 
Possible values:\n * - 'weak' - write operations may be reordered, works faster, default\n * - 'medium' - write operations go through dynamically selected leader,\n * may be inconsistent for a short period of time in case of leader change\n * - 'strong' - Write operations go through the permanent leader,\n * consistent, but may be unavailable if leader is down\n * - points: Points with named vectors\n * - shard_key: Specify in which shards to look for the points, if not specified - look in all shards\n * @returns Operation result\n */", "url": "https://github.com/qdrant/qdrant-js/blob/1c43b2db971367ece3788c4b6c368a1604b29530/packages/js-client-rest/src/qdrant-client.ts#L555-L572", "sha": "1c43b2db971367ece3788c4b6c368a1604b29530"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "main_main_item.init", "code": "init(init_?: typeof this.init_data): void {\n\t\t// 不存在多语言则删除组件\n\t\tif (!mk.language_manage.label_data_tab[cc.js.getClassName(main_main)]?.[this.init_data.label_s]) {\n\t\t\tthis.nodes.label.getComponent(mk.language.label)?.destroy();\n\t\t}\n\n\t\tObject.assign(this.data, this.init_data);\n\t}", "docstring": "/* ------------------------------- 生命周期 ------------------------------- */", "url": "https://github.com/1226085293/MKFramework/blob/33ed975103f1e7c74200cb658818eea945fd678f/assets/main/module/main/item/main_main_item.ts#L24-L31", "sha": "33ed975103f1e7c74200cb658818eea945fd678f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "mk_storage.clear", "code": "clear(): void {\n\t\tfor (const k_s in this._cache) {\n\t\t\tthis._cache[k_s] = undefined!;\n\t\t}\n\n\t\tif (!this._init_config.name_s) {\n\t\t\treturn;\n\t\t}\n\n\t\tObject.keys(this._init_config.data).forEach((v_s) => {\n\t\t\tconst key_s = `${this._init_config.name_s}-${String(v_s)}`;\n\n\t\t\tthis._write_pipeline.add(() => {\n\t\t\t\tcc.sys.localStorage.removeItem(key_s);\n\t\t\t});\n\t\t});\n\t}", "docstring": "/** 清空当前存储器数据 */", "url": "https://github.com/1226085293/MKFramework/blob/33ed975103f1e7c74200cb658818eea945fd678f/extensions/MKFramework/assets/mk-framework/@framework/mk_storage.ts#L136-L152", "sha": "33ed975103f1e7c74200cb658818eea945fd678f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "mk_asset.release", "code": "release(asset_: cc.Asset | cc.Asset[]): void {\n\t\tconst asset_as: cc.Asset[] = Array.isArray(asset_) ? 
asset_ : [asset_];\n\n\t\tasset_as.forEach((v) => {\n\t\t\tif (!v.isValid) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// 释放动态图集中的资源\n\t\t\tif (cc.dynamicAtlasManager?.enabled) {\n\t\t\t\tif (v instanceof cc.SpriteFrame) {\n\t\t\t\t\tcc.dynamicAtlasManager.deleteAtlasSpriteFrame(v);\n\t\t\t\t} else if (v instanceof cc.Texture2D) {\n\t\t\t\t\tcc.dynamicAtlasManager.deleteAtlasTexture(v);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// 更新引用计数\n\t\t\tfor (let k_n = 0; k_n < v.refCount; k_n++) {\n\t\t\t\tv.decRef(false);\n\t\t\t}\n\n\t\t\t// 释放资源,禁止自动释放,否则会出现释放后立即加载当前资源导致加载返回资源是已释放后的\n\t\t\tcc.assetManager.releaseAsset(v);\n\t\t\t// 更新资源管理表\n\t\t\tthis._asset_manage_map.delete(v.nativeUrl || v._uuid);\n\n\t\t\tthis._log.debug(\"释放资源\", v.name, v.nativeUrl, v._uuid);\n\t\t});\n\t}", "docstring": "/**\n\t * 释放资源\n\t * @param asset_ 释放的资源\n\t */", "url": "https://github.com/1226085293/MKFramework/blob/33ed975103f1e7c74200cb658818eea945fd678f/extensions/MKFramework/assets/mk-framework/@framework/resources/mk_asset.ts#L460-L489", "sha": "33ed975103f1e7c74200cb658818eea945fd678f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getTagToFindSuggestion", "code": "function getTagToFindSuggestion(\n\tapp: App,\n\tsettings: AutoTimelineSettings,\n\tquery: string\n): string[] {\n\tconst unfilteredRestults = app.vault\n\t\t.getMarkdownFiles()\n\t\t.reduce((accumulator, file) => {\n\t\t\tconst cachedMetadata = app.metadataCache.getFileCache(file);\n\n\t\t\tif (!cachedMetadata || !cachedMetadata.frontmatter)\n\t\t\t\treturn accumulator;\n\t\t\taccumulator.push(\n\t\t\t\t...getTagsFromMetadataOrTagObject(\n\t\t\t\t\tsettings,\n\t\t\t\t\tcachedMetadata.frontmatter,\n\t\t\t\t\tcachedMetadata.tags\n\t\t\t\t)\n\t\t\t);\n\t\t\treturn accumulator;\n\t\t}, []);\n\n\tconst allQueries = query.split(settings.markdownBlockTagsToFindSeparator);\n\tconst currentQuery = allQueries[allQueries.length - 1];\n\tconst allreadyUsedQueries = allQueries.slice(0, -1);\n\n\treturn filterAndSortSuggestionResults(\n\t\tunfilteredRestults,\n\t\tcurrentQuery,\n\t\tallreadyUsedQueries\n\t);\n}", "docstring": "/**\n * Given a query get all the `TagToFind` typed suggestions.\n * This will go trough every cached file and retrieve all timeline metadata + tags.\n *\n * @param app - The obsidian app object.\n * @param settings - The plugin settings.\n * @param query - The query that the user just typed.\n * @returns the suggestions set.\n */", "url": "https://github.com/April-Gras/obsidian-auto-timelines/blob/efb47f419c90db6a65e865c440958ace84680aa0/src/suggester.ts#L272-L303", "sha": "efb47f419c90db6a65e865c440958ace84680aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ChatflowTool._call", "code": "protected async _call(\n arg: z.infer,\n _?: CallbackManagerForToolRun,\n flowConfig?: { sessionId?: string; chatId?: string; input?: string }\n ): Promise {\n const inputQuestion = this.input || arg.input\n\n const body = {\n question: inputQuestion,\n chatId: this.startNewSession ? uuidv4() : flowConfig?.chatId,\n overrideConfig: {\n sessionId: this.startNewSession ? uuidv4() : flowConfig?.sessionId,\n ...(this.overrideConfig ?? {}),\n ...(arg.overrideConfig ?? 
{})\n }\n }\n\n const options = {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...this.headers\n },\n body: JSON.stringify(body)\n }\n\n let sandbox = {\n $callOptions: options,\n $callBody: body,\n util: undefined,\n Symbol: undefined,\n child_process: undefined,\n fs: undefined,\n process: undefined\n }\n\n const code = `\nconst fetch = require('node-fetch');\nconst url = \"${this.baseURL}/api/v1/prediction/${this.chatflowid}\";\n\nconst body = $callBody;\n\nconst options = $callOptions;\n\ntry {\n\tconst response = await fetch(url, options);\n\tconst resp = await response.json();\n\treturn resp.text;\n} catch (error) {\n\tconsole.error(error);\n\treturn '';\n}\n`\n const builtinDeps = process.env.TOOL_FUNCTION_BUILTIN_DEP\n ? defaultAllowBuiltInDep.concat(process.env.TOOL_FUNCTION_BUILTIN_DEP.split(','))\n : defaultAllowBuiltInDep\n const externalDeps = process.env.TOOL_FUNCTION_EXTERNAL_DEP ? process.env.TOOL_FUNCTION_EXTERNAL_DEP.split(',') : []\n const deps = availableDependencies.concat(externalDeps)\n\n const vmOptions = {\n console: 'inherit',\n sandbox,\n require: {\n external: { modules: deps },\n builtin: builtinDeps\n },\n eval: false,\n wasm: false,\n timeout: 10000\n } as any\n\n const vm = new NodeVM(vmOptions)\n const response = await vm.run(`module.exports = async function() {${code}}()`, __dirname)\n\n return response\n }", "docstring": "// @ts-ignore", "url": "https://github.com/FlowiseAI/Flowise/blob/c0a74782d8f1dbe118d2ed3aa40dd292d25d9119/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts#L295-L370", "sha": "c0a74782d8f1dbe118d2ed3aa40dd292d25d9119"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "currentAgentMeta", "code": "const currentAgentMeta = (s: SessionStore): MetaData => {\n const isInbox = sessionSelectors.isInboxSession(s);\n\n const defaultMeta = {\n avatar: isInbox ? DEFAULT_INBOX_AVATAR : DEFAULT_AVATAR,\n backgroundColor: DEFAULT_BACKGROUND_COLOR,\n description: isInbox\n ? t('inbox.desc', { ns: 'chat' })\n : currentAgentSystemRole(s) || t('noDescription'),\n title: isInbox ? 
t('inbox.title', { ns: 'chat' }) : t('defaultSession'),\n };\n\n const session = sessionSelectors.currentSession(s);\n\n return merge(defaultMeta, session?.meta);\n};", "docstring": "// ========== Meta ============== //", "url": "https://github.com/chatchat-space/Langchain-Chatchat/blob/40994eb6c3c8aeb9af4d52123abfb471a3f27b9c/frontend/src/store/session/slices/agent/selectors.ts#L79-L94", "sha": "40994eb6c3c8aeb9af4d52123abfb471a3f27b9c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TestCaseRun.fillExecutedBddStep", "code": "private fillExecutedBddStep(bddStep: BddStepData, possiblePwSteps: pw.TestStep[]) {\n const pwStep = this.findPlaywrightStep(possiblePwSteps, bddStep);\n if (pwStep?.error) {\n this.registerErrorStep(pwStep, pwStep.error);\n }\n if (this.isTimeouted() && pwStep && isUnknownDuration(pwStep)) {\n this.registerTimeoutedStep(pwStep);\n }\n if (pwStep?.parent && bddStep.isBg) {\n this.bgRoots.add(pwStep.parent);\n }\n if (pwStep) {\n this.attachmentMapper.populateStepAttachments(pwStep);\n }\n\n return { bddStep, pwStep };\n }", "docstring": "// eslint-disable-next-line visual/complexity", "url": "https://github.com/vitalets/playwright-bdd/blob/fc78801c6c525543fb3f26d2261567da32c3e808/src/reporter/cucumber/messagesBuilder/TestCaseRun.ts#L109-L125", "sha": "fc78801c6c525543fb3f26d2261567da32c3e808"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ComfyApp.graphToPrompt", "code": "graphToPrompt(workflow: ComfyBoxWorkflow, tag: string | null = null): SerializedPrompt {\n return this.promptSerializer.serialize(workflow.graph, tag)\n }", "docstring": "/**\n * Converts the current graph workflow for sending to the API\n * @returns The workflow and node links\n */", "url": "https://github.com/space-nuko/ComfyBox/blob/abd31401f0e159af84040453866d308e519ca819/src/lib/components/ComfyApp.ts#L1016-L1018", "sha": "abd31401f0e159af84040453866d308e519ca819"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "insertTemplate", "code": "function insertTemplate(template: SerializedComfyBoxTemplate, graph: LGraph, templateNodeIDToNode: Record, container: ContainerLayout, childIndex: number): IDragItem {\n const idMapping: Record = {};\n\n const getDragItemID = (id: DragItemID): DragItemID => {\n idMapping[id] ||= uuidv4();\n return idMapping[id];\n }\n\n // Ensure all IDs are unique, and rewrite node IDs in widgets to point\n // to newly created nodes\n for (const [id, entry] of Object.entries(template.layout.allItems)) {\n const newId = getDragItemID(id);\n template.layout.allItems[newId] = entry;\n entry.dragItem.id = newId;\n\n if (entry.parent)\n entry.parent = getDragItemID(entry.parent)\n entry.children = entry.children.map(getDragItemID);\n\n if (entry.dragItem.type === \"widget\") {\n entry.dragItem.nodeId = templateNodeIDToNode[entry.dragItem.nodeId].id;\n }\n }\n\n if (template.layout.root) {\n template.layout.root = getDragItemID(template.layout.root)\n\n // make sure the new root doesn't have a parent since that parent\n // was detached from the serialized layout and won't be found in\n // template.layout.allItems\n template.layout.allItems[template.layout.root].parent = null;\n }\n\n const raw = deserializeRaw(template.layout, graph);\n\n // merge the template's detached layout tree into this layout\n store.update(s => {\n s.allItems = { ...s.allItems, ...raw.allItems }\n s.allItemsByNode = { ...s.allItemsByNode, ...raw.allItemsByNode }\n return s;\n })\n\n 
moveItem(raw.root, container, childIndex);\n\n return raw.root\n }", "docstring": "/*\n * NOTE: Modifies the template in-place, be sure you cloned it beforehand!\n */", "url": "https://github.com/space-nuko/ComfyBox/blob/abd31401f0e159af84040453866d308e519ca819/src/lib/stores/layoutStates.ts#L1094-L1139", "sha": "abd31401f0e159af84040453866d308e519ca819"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExecutionCommandProvider.handleChangeDirectory", "code": "async handleChangeDirectory (uri: vscode.Uri): Promise {\n this._telemetryLogger.logEvent({\n eventKey: 'ML_VS_CODE_ACTIONS',\n data: {\n action_type: 'changeDirectory',\n result: ''\n }\n });\n\n await this._terminalService.openTerminalOrBringToFront();\n\n try {\n await this._mvm.getReadyPromise();\n } catch (e) {\n return;\n }\n\n void this._mvm.feval('cd', 0, [uri.fsPath]);\n }", "docstring": "/**\n * Implements the MATLAB change directory action\n * @param uri The file path that MATLAB should \"cd\" to\n * @returns\n */", "url": "https://github.com/mathworks/MATLAB-extension-for-vscode/blob/1cdb3395a62bff2bb8e08229ca19c4d6f5750d9c/src/commandwindow/ExecutionCommandProvider.ts#L282-L300", "sha": "1cdb3395a62bff2bb8e08229ca19c4d6f5750d9c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_convertAgentStepToMessages", "code": "function _convertAgentStepToMessages(\n action: AgentAction | FunctionsAgentAction | ToolsAgentAction,\n observation: string\n) {\n if (isToolsAgentAction(action) && action.toolCallId !== undefined) {\n const log = action.messageLog as BaseMessage[]\n if (observation.length < 1) {\n observation = `The tool ${action.tool} returned no output.`\n }\n return log.concat(\n new ToolMessage({\n content: observation,\n name: action.tool,\n tool_call_id: action.toolCallId\n })\n )\n } else if (\n isFunctionsAgentAction(action) &&\n action.messageLog !== undefined\n ) {\n return action.messageLog?.concat(\n new FunctionMessage(observation, action.tool)\n )\n } else {\n return [new AIMessage(action.log)]\n }\n}", "docstring": "// eslint-disable-next-line @typescript-eslint/naming-convention", "url": "https://github.com/ChatLunaLab/chatluna/blob/a52137997f69d9a76e766f20c2b4daf1484e7452/packages/core/src/llm-core/agent/openai/index.ts#L43-L69", "sha": "a52137997f69d9a76e766f20c2b4daf1484e7452"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NoopVizualizationBuilder.configure", "code": "async configure() {\n const adapter = await navigator.gpu.requestAdapter();\n if (adapter === null) {\n throw new VisualizerError('Unable to request webgpu adapter');\n }\n\n this.device = await adapter.requestDevice();\n if (this.device === null) {\n throw new VisualizerError('Unable to get WebGPU device');\n }\n }", "docstring": "/**\n * @inheritDoc VisualizationBuilder.configure\n */", "url": "https://github.com/google/tour-of-wgsl/blob/cffb757ef2f0c4554d6fa7c061337ecf7c17190b/assets/ts/noop_visualizer.ts#L27-L37", "sha": "cffb757ef2f0c4554d6fa7c061337ecf7c17190b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GeoUtil.getNorthByPointAB", "code": "public static getNorthByPointAB(point_a: Point, point_b: Point): number {\n\t\tlet point_c = new Point(point_a.lng, point_b.lat);\n\t\tlet ab = this.getDistance(point_a, point_b);\n\t\tlet bc = this.getDistance(point_b, point_c);\n\t\t// 0\n\t\tif (point_a.lng == point_b.lng && point_a.lat == point_b.lat) {\n\t\t\treturn 0;\n\t\t}\n\t\t// 0 <= n <= 
90\n\t\tif (point_a.lng <= point_b.lng && point_a.lat <= point_b.lat) {\n\t\t\tif (bc < ab) {\n\t\t\t\tlet BAC = Math.asin(bc / ab);\n\t\t\t\treturn this.radToDeg(BAC);\n\t\t\t} else {\n\t\t\t\treturn 90;\n\t\t\t}\n\t\t}\n\t\t// 90 < n <= 180\n\t\tif (point_a.lng <= point_b.lng && point_a.lat > point_b.lat) {\n\t\t\tif (bc < ab) {\n\t\t\t\tlet BAC = Math.asin(bc / ab);\n\t\t\t\treturn 180 - this.radToDeg(BAC);\n\t\t\t} else {\n\t\t\t\treturn 90;\n\t\t\t}\n\t\t}\n\t\t// 180 < n <= 270\n\t\tif (point_a.lng >= point_b.lng && point_a.lat >= point_b.lat) {\n\t\t\tif (bc < ab) {\n\t\t\t\tlet BAC = Math.asin(bc / ab);\n\t\t\t\treturn 180 + this.radToDeg(BAC);\n\t\t\t} else {\n\t\t\t\treturn 270;\n\t\t\t}\n\t\t}\n\t\t// 270 < n <= 360\n\t\tif (point_a.lng >= point_b.lng && point_a.lat < point_b.lat) {\n\t\t\tif (bc < ab) {\n\t\t\t\tlet BAC = Math.asin(bc / ab);\n\t\t\t\treturn 360 - this.radToDeg(BAC);\n\t\t\t} else {\n\t\t\t\treturn 270;\n\t\t\t}\n\t\t}\n\t\treturn 0;\n\t}", "docstring": "/** 根据一点的经纬度求另一点与其相对的北向夹角 */", "url": "https://github.com/kurimuson/java_map_download/blob/743dfa5b699efc44efa85004f73bd6fe7fca13ef/Web/src/app/map/geo-util.ts#L475-L520", "sha": "743dfa5b699efc44efa85004f73bd6fe7fca13ef"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ChatUI.asyncGenerate", "code": "private async asyncGenerate() {\n await this.asyncInitChat();\n this.requestInProgress = true;\n const prompt = this.uiChatInput.value;\n if (prompt == \"\") {\n this.requestInProgress = false;\n return;\n }\n\n this.appendMessage(\"right\", prompt);\n this.uiChatInput.value = \"\";\n this.uiChatInput.setAttribute(\"placeholder\", \"Generating...\");\n\n this.appendMessage(\"left\", \"\");\n this.chatHistory.push({ role: \"user\", content: prompt });\n\n try {\n let curMessage = \"\";\n let usage: webllm.CompletionUsage | undefined = undefined;\n const completion = await this.engine.chat.completions.create({\n stream: true,\n messages: this.chatHistory,\n stream_options: { include_usage: true },\n });\n // TODO(Charlie): Processing of � requires changes\n for await (const chunk of completion) {\n const curDelta = chunk.choices[0]?.delta.content;\n if (curDelta) {\n curMessage += curDelta;\n }\n this.updateLastMessage(\"left\", curMessage);\n if (chunk.usage) {\n usage = chunk.usage;\n }\n }\n if (usage) {\n this.uiChatInfoLabel.innerHTML =\n `prompt_tokens: ${usage.prompt_tokens}, ` +\n `completion_tokens: ${usage.completion_tokens}, ` +\n `prefill: ${usage.extra.prefill_tokens_per_s.toFixed(4)} tokens/sec, ` +\n `decoding: ${usage.extra.decode_tokens_per_s.toFixed(4)} tokens/sec`;\n }\n const finalMessage = await this.engine.getMessage();\n this.updateLastMessage(\"left\", finalMessage); // TODO: Remove this after � issue is fixed\n this.chatHistory.push({ role: \"assistant\", content: finalMessage });\n } catch (err) {\n this.appendMessage(\"error\", \"Generate error, \" + err.toString());\n console.log(err.stack);\n await this.unloadChat();\n }\n this.uiChatInput.setAttribute(\"placeholder\", \"Enter your message...\");\n this.requestInProgress = false;\n }", "docstring": "/**\n * Run generate\n */", "url": "https://github.com/mlc-ai/web-llm/blob/632d34725629b480b5b2772379ef5c150b1286f0/examples/simple-chat-upload/src/simple_chat.ts#L263-L315", "sha": "632d34725629b480b5b2772379ef5c150b1286f0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "resolveConstrainedBigint", "code": "const resolveConstrainedBigint = (\n number: bigint,\n 
constraint: TypeConstraint\n) => {\n const min = constraint.min && constraint.min + BigInt(constraint.minExclusive ? 1 : 0)\n const max = constraint.max && constraint.max - BigInt(constraint.maxExclusive ? 1 : 0)\n\n let result: number | bigint = number\n\n if (min && number < min) {\n result = min\n }\n if (max && number > max) {\n result = max\n }\n\n return result\n}", "docstring": "/** @internal */", "url": "https://github.com/sukovanej/effect-http/blob/598af7145b6b72a49adb68f86411f9f3053a114b/packages/effect-http/src/internal/example-compiler.ts#L299-L316", "sha": "598af7145b6b72a49adb68f86411f9f3053a114b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "createParameters", "code": "const createParameters = (\n type: \"query\" | \"header\" | \"path\",\n schema: Schema.Schema,\n componentSchemaCallback: ComponentSchemaCallback\n): Array => {\n return getPropertySignatures(type, schema.ast).map((ps) => {\n if (typeof ps.name !== \"string\") {\n throw new Error(`${type} parameter struct fields must be strings`)\n }\n\n const ast = (ps.type._tag === \"Union\" && ps.type.types.some(AST.isUndefinedKeyword)) ?\n AST.Union.make(\n ps.type.types.filter((ast) => !AST.isUndefinedKeyword(ast)),\n ps.type.annotations\n ) :\n ps.type\n\n const schema = Schema.make(ast)\n\n const parameter: OpenApiTypes.OpenAPISpecParameter = {\n name: ps.name,\n in: type,\n schema: makeSchema(schema, componentSchemaCallback)\n }\n\n if (!ps.isOptional) {\n parameter[\"required\"] = true\n }\n\n const description = AST.getDescriptionAnnotation(schema.ast)\n if (Option.isSome(description)) {\n parameter[\"description\"] = description.value\n }\n\n return parameter\n })\n}", "docstring": "/** @internal */", "url": "https://github.com/sukovanej/effect-http/blob/598af7145b6b72a49adb68f86411f9f3053a114b/packages/effect-http/src/internal/open-api.ts#L285-L321", "sha": "598af7145b6b72a49adb68f86411f9f3053a114b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "stopAudio", "code": "function stopAudio(): void {\n // stop the audio playback\n currentAudio?.pause();\n currentAudio = null;\n\n // update audio playback state\n isPlaying = false;\n\n // clear the audioQueue\n audioQueue.length = 0;\n }", "docstring": "/**\n * stops audio playback, clears audio playback queue, and updates audio playback state\n */", "url": "https://github.com/HumeAI/hume-api-examples/blob/2c4be65c2bc56604cfedb0d129856a038e415f17/evi-typescript-function-calling/src/main.ts#L209-L219", "sha": "2c4be65c2bc56604cfedb0d129856a038e415f17"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MD5Hashing.getInstance", "code": "public static getInstance(): MD5Hashing {\n if (!MD5Hashing.instance)\n MD5Hashing.instance = new MD5Hashing()\n\n return MD5Hashing.instance\n }", "docstring": "// 获取单例实例", "url": "https://github.com/yudaocode/yudao-ui-admin-vben/blob/5e00382e0af48d8dc8da33de6902b1289311c28d/src/utils/cipher.ts#L85-L90", "sha": "5e00382e0af48d8dc8da33de6902b1289311c28d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getEnvProviderSsmParams", "code": "const getEnvProviderSsmParams = async ()\n : Promise<{ [key: string]: string; }> => {\n const params = (await Promise.all(\n paramKeys.map(async (paramKey): Promise<{ [key: string]: string; }> => {\n const val = await getPlatformAccountSSMParameterValue(paramKey, region, ctx.logger);\n return {\n [paramKey]: val\n };\n })\n )).reduce((acc, paramKeyValMap) => {\n 
const typedAcc: { [key: string]: string; } = acc;\n const key = Object.keys(paramKeyValMap)[0];\n return {\n ...typedAcc, [key]: paramKeyValMap[key]\n };\n }, {});\n\n return params;\n\n };", "docstring": "// Get a key/value map of SSM parameters", "url": "https://github.com/awslabs/harmonix/blob/fd3d3e2c41f68775e69259d70f2e8e500f885234/backstage-plugins/plugins/scaffolder-backend-module-aws-apps/src/actions/get-platform-parameters/get-platform-parameters.ts#L102-L121", "sha": "fd3d3e2c41f68775e69259d70f2e8e500f885234"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "put_bits", "code": "static inline void put_bits(limb_t *tab, limb_t len, slimb_t pos, limb_t val)\n{\n limb_t i;\n int p;\n\n i = pos >> LIMB_LOG2_BITS;\n p = pos & (LIMB_BITS - 1);\n if (i < len)\n tab[i] |= val << p;\n if (p != 0) {\n i++;\n if (i < len) {\n tab[i] |= val >> (LIMB_BITS - p);\n }\n }\n}", "docstring": "/***************************************************************/\n/* Integer multiplication with FFT */\n/* or LIMB_BITS at bit position 'pos' in tab */", "url": "https://github.com/vitoplantamura/BugChecker/blob/8b81e76efe457b59be3a6e752efd43916ba0cabb/BugChecker/QuickJS/libbf.c#L7241-L7256", "sha": "8b81e76efe457b59be3a6e752efd43916ba0cabb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ZyanBitsetPerformByteOperation", "code": "ZyanStatus ZyanBitsetPerformByteOperation(ZyanBitset* destination, const ZyanBitset* source,\n ZyanBitsetByteOperation operation)\n{\n if (!destination || !source || !operation)\n {\n return ZYAN_STATUS_INVALID_ARGUMENT;\n }\n\n ZyanUSize s1;\n ZyanUSize s2;\n ZYAN_CHECK(ZyanVectorGetSize(&destination->bits, &s1));\n ZYAN_CHECK(ZyanVectorGetSize(&source->bits, &s2));\n\n const ZyanUSize min = ZYAN_MIN(s1, s2);\n for (ZyanUSize i = 0; i < min; ++i)\n {\n ZyanU8* v1;\n const ZyanU8* v2;\n ZYAN_CHECK(ZyanVectorGetPointerMutable(&destination->bits, i, (void**)&v1));\n ZYAN_CHECK(ZyanVectorGetPointer(&source->bits, i, (const void**)&v2));\n\n ZYAN_ASSERT(v1);\n ZYAN_ASSERT(v2);\n\n ZYAN_CHECK(operation(v1, v2));\n }\n\n return ZYAN_STATUS_SUCCESS;\n}", "docstring": "/* ---------------------------------------------------------------------------------------------- */\n/* Logical operations */\n/* ---------------------------------------------------------------------------------------------- */", "url": "https://github.com/vitoplantamura/BugChecker/blob/8b81e76efe457b59be3a6e752efd43916ba0cabb/dependencies/zydis/dependencies/zycore/src/Bitset.c#L196-L224", "sha": "8b81e76efe457b59be3a6e752efd43916ba0cabb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "eval_gbmd", "code": "static int eval_gbmd(acpi_handle handle, unsigned long *res)\n{\n\treturn eval_int(handle, \"VPC0.GBMD\", res);\n}", "docstring": "//static int eval_qcho(acpi_handle handle, unsigned long *res)\n//{\n//\t// \\_SB.PCI0.LPC0.EC0.QCHO\n//\treturn eval_int(handle, \"QCHO\", res);\n//}", "url": "https://github.com/johnfanv2/LenovoLegionLinux/blob/ae96a82adb879e729c5727c8769888b47ee1181f/kernel_module/legion-laptop.c#L1391-L1394", "sha": "ae96a82adb879e729c5727c8769888b47ee1181f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "f_sync", "code": "FRESULT f_sync (\n\tFIL* fp\t\t/* Open file to be synced */\n)\n{\n\tFRESULT res;\n\tFATFS *fs;\n\tDWORD tm;\n\tBYTE *dir;\n\n\n\tres = validate(&fp->obj, &fs);\t/* Check validity of the file object */\n\tif (res == FR_OK) {\n\t\tif (fp->flag & 
FA_MODIFIED) {\t/* Is there any change to the file? */\n#if !FF_FS_TINY\n\t\t\tif (fp->flag & FA_DIRTY) {\t/* Write-back cached data if needed */\n\t\t\t\tif (disk_write(fs->pdrv, fp->buf, fp->sect, 1) != RES_OK) LEAVE_FF(fs, FR_DISK_ERR);\n\t\t\t\tfp->flag &= (BYTE)~FA_DIRTY;\n\t\t\t}\n#endif\n\t\t\t/* Update the directory entry */\n\t\t\ttm = GET_FATTIME();\t\t\t\t/* Modified time */\n#if FF_FS_EXFAT\n\t\t\tif (fs->fs_type == FS_EXFAT) {\n\t\t\t\tres = fill_first_frag(&fp->obj);\t/* Fill first fragment on the FAT if needed */\n\t\t\t\tif (res == FR_OK) {\n\t\t\t\t\tres = fill_last_frag(&fp->obj, fp->clust, 0xFFFFFFFF);\t/* Fill last fragment on the FAT if needed */\n\t\t\t\t}\n\t\t\t\tif (res == FR_OK) {\n\t\t\t\t\tDIR dj;\n\t\t\t\t\tDEF_NAMBUF\n\n\t\t\t\t\tINIT_NAMBUF(fs);\n\t\t\t\t\tres = load_obj_xdir(&dj, &fp->obj);\t/* Load directory entry block */\n\t\t\t\t\tif (res == FR_OK) {\n\t\t\t\t\t\tfs->dirbuf[XDIR_Attr] |= AM_ARC;\t\t\t\t/* Set archive attribute to indicate that the file has been changed */\n\t\t\t\t\t\tfs->dirbuf[XDIR_GenFlags] = fp->obj.stat | 1;\t/* Update file allocation information */\n\t\t\t\t\t\tst_dword(fs->dirbuf + XDIR_FstClus, fp->obj.sclust);\t\t/* Update start cluster */\n\t\t\t\t\t\tst_qword(fs->dirbuf + XDIR_FileSize, fp->obj.objsize);\t\t/* Update file size */\n\t\t\t\t\t\tst_qword(fs->dirbuf + XDIR_ValidFileSize, fp->obj.objsize);\t/* (FatFs does not support Valid File Size feature) */\n\t\t\t\t\t\tst_dword(fs->dirbuf + XDIR_ModTime, tm);\t\t/* Update modified time */\n\t\t\t\t\t\tfs->dirbuf[XDIR_ModTime10] = 0;\n\t\t\t\t\t\tst_dword(fs->dirbuf + XDIR_AccTime, 0);\n\t\t\t\t\t\tres = store_xdir(&dj);\t/* Restore it to the directory */\n\t\t\t\t\t\tif (res == FR_OK) {\n\t\t\t\t\t\t\tres = sync_fs(fs);\n\t\t\t\t\t\t\tfp->flag &= (BYTE)~FA_MODIFIED;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tFREE_NAMBUF();\n\t\t\t\t}\n\t\t\t} else\n#endif\n\t\t\t{\n\t\t\t\tres = move_window(fs, fp->dir_sect);\n\t\t\t\tif (res == FR_OK) {\n\t\t\t\t\tdir = fp->dir_ptr;\n\t\t\t\t\tdir[DIR_Attr] |= AM_ARC;\t\t\t\t\t\t/* Set archive attribute to indicate that the file has been changed */\n\t\t\t\t\tst_clust(fp->obj.fs, dir, fp->obj.sclust);\t\t/* Update file allocation information */\n\t\t\t\t\tst_dword(dir + DIR_FileSize, (DWORD)fp->obj.objsize);\t/* Update file size */\n\t\t\t\t\tst_dword(dir + DIR_ModTime, tm);\t\t\t\t/* Update modified time */\n\t\t\t\t\tst_word(dir + DIR_LstAccDate, 0);\n\t\t\t\t\tfs->wflag = 1;\n\t\t\t\t\tres = sync_fs(fs);\t\t\t\t\t/* Restore it to the directory */\n\t\t\t\t\tfp->flag &= (BYTE)~FA_MODIFIED;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tLEAVE_FF(fs, res);\n}", "docstring": "/*-----------------------------------------------------------------------*/\n/* Synchronize the File */\n/*-----------------------------------------------------------------------*/", "url": "https://github.com/toniebox-reverse-engineering/teddycloud/blob/83d3b29cbfc74e8f76f48f8782646c8e464055b2/fat/source/ff.c#L4135-L4205", "sha": "83d3b29cbfc74e8f76f48f8782646c8e464055b2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "putc_bfd", "code": "static void putc_bfd (putbuff* pb, TCHAR c)\n{\n\tUINT n;\n\tint i, nc;\n#if FF_USE_LFN && FF_LFN_UNICODE\n\tWCHAR hs, wc;\n#if FF_LFN_UNICODE == 2\n\tDWORD dc;\n\tconst TCHAR* tp;\n#endif\n#endif\n\n\tif (FF_USE_STRFUNC == 2 && c == '\\n') {\t /* LF -> CRLF conversion */\n\t\tputc_bfd(pb, '\\r');\n\t}\n\n\ti = pb->idx;\t\t\t/* Write index of pb->buf[] */\n\tif (i < 0) return;\t\t/* In write error? 
*/\n\tnc = pb->nchr;\t\t\t/* Write unit counter */\n\n#if FF_USE_LFN && FF_LFN_UNICODE\n#if FF_LFN_UNICODE == 1\t\t/* UTF-16 input */\n\tif (IsSurrogateH(c)) {\t/* Is this a high-surrogate? */\n\t\tpb->hs = c; return;\t/* Save it for next */\n\t}\n\ths = pb->hs; pb->hs = 0;\n\tif (hs != 0) {\t\t\t/* Is there a leading high-surrogate? */\n\t\tif (!IsSurrogateL(c)) hs = 0;\t/* Discard high-surrogate if not a surrogate pair */\n\t} else {\n\t\tif (IsSurrogateL(c)) return;\t/* Discard stray low-surrogate */\n\t}\n\twc = c;\n#elif FF_LFN_UNICODE == 2\t/* UTF-8 input */\n\tfor (;;) {\n\t\tif (pb->ct == 0) {\t/* Out of multi-byte sequence? */\n\t\t\tpb->bs[pb->wi = 0] = (BYTE)c;\t/* Save 1st byte */\n\t\t\tif ((BYTE)c < 0x80) break;\t\t\t\t\t/* Single byte code? */\n\t\t\tif (((BYTE)c & 0xE0) == 0xC0) pb->ct = 1;\t/* 2-byte sequence? */\n\t\t\tif (((BYTE)c & 0xF0) == 0xE0) pb->ct = 2;\t/* 3-byte sequence? */\n\t\t\tif (((BYTE)c & 0xF8) == 0xF0) pb->ct = 3;\t/* 4-byte sequence? */\n\t\t\treturn;\t\t\t\t\t\t\t\t\t\t/* Wrong leading byte (discard it) */\n\t\t} else {\t\t\t\t/* In the multi-byte sequence */\n\t\t\tif (((BYTE)c & 0xC0) != 0x80) {\t/* Broken sequence? */\n\t\t\t\tpb->ct = 0; continue;\t\t/* Discard the sequense */\n\t\t\t}\n\t\t\tpb->bs[++pb->wi] = (BYTE)c;\t/* Save the trailing byte */\n\t\t\tif (--pb->ct == 0) break;\t/* End of the sequence? */\n\t\t\treturn;\n\t\t}\n\t}\n\ttp = (const TCHAR*)pb->bs;\n\tdc = tchar2uni(&tp);\t\t\t/* UTF-8 ==> UTF-16 */\n\tif (dc == 0xFFFFFFFF) return;\t/* Wrong code? */\n\ths = (WCHAR)(dc >> 16);\n\twc = (WCHAR)dc;\n#elif FF_LFN_UNICODE == 3\t/* UTF-32 input */\n\tif (IsSurrogate(c) || c >= 0x110000) return;\t/* Discard invalid code */\n\tif (c >= 0x10000) {\t\t/* Out of BMP? */\n\t\ths = (WCHAR)(0xD800 | ((c >> 10) - 0x40)); \t/* Make high surrogate */\n\t\twc = 0xDC00 | (c & 0x3FF);\t\t\t\t\t/* Make low surrogate */\n\t} else {\n\t\ths = 0;\n\t\twc = (WCHAR)c;\n\t}\n#endif\n\t/* A code point in UTF-16 is available in hs and wc */\n\n#if FF_STRF_ENCODE == 1\t\t/* Write a code point in UTF-16LE */\n\tif (hs != 0) {\t/* Surrogate pair? */\n\t\tst_word(&pb->buf[i], hs);\n\t\ti += 2;\n\t\tnc++;\n\t}\n\tst_word(&pb->buf[i], wc);\n\ti += 2;\n#elif FF_STRF_ENCODE == 2\t/* Write a code point in UTF-16BE */\n\tif (hs != 0) {\t/* Surrogate pair? */\n\t\tpb->buf[i++] = (BYTE)(hs >> 8);\n\t\tpb->buf[i++] = (BYTE)hs;\n\t\tnc++;\n\t}\n\tpb->buf[i++] = (BYTE)(wc >> 8);\n\tpb->buf[i++] = (BYTE)wc;\n#elif FF_STRF_ENCODE == 3\t/* Write a code point in UTF-8 */\n\tif (hs != 0) {\t/* 4-byte sequence? */\n\t\tnc += 3;\n\t\ths = (hs & 0x3FF) + 0x40;\n\t\tpb->buf[i++] = (BYTE)(0xF0 | hs >> 8);\n\t\tpb->buf[i++] = (BYTE)(0x80 | (hs >> 2 & 0x3F));\n\t\tpb->buf[i++] = (BYTE)(0x80 | (hs & 3) << 4 | (wc >> 6 & 0x0F));\n\t\tpb->buf[i++] = (BYTE)(0x80 | (wc & 0x3F));\n\t} else {\n\t\tif (wc < 0x80) {\t/* Single byte? */\n\t\t\tpb->buf[i++] = (BYTE)wc;\n\t\t} else {\n\t\t\tif (wc < 0x800) {\t/* 2-byte sequence? 
*/\n\t\t\t\tnc += 1;\n\t\t\t\tpb->buf[i++] = (BYTE)(0xC0 | wc >> 6);\n\t\t\t} else {\t\t\t/* 3-byte sequence */\n\t\t\t\tnc += 2;\n\t\t\t\tpb->buf[i++] = (BYTE)(0xE0 | wc >> 12);\n\t\t\t\tpb->buf[i++] = (BYTE)(0x80 | (wc >> 6 & 0x3F));\n\t\t\t}\n\t\t\tpb->buf[i++] = (BYTE)(0x80 | (wc & 0x3F));\n\t\t}\n\t}\n#else\t\t\t\t\t\t/* Write a code point in ANSI/OEM */\n\tif (hs != 0) return;\n\twc = ff_uni2oem(wc, CODEPAGE);\t/* UTF-16 ==> ANSI/OEM */\n\tif (wc == 0) return;\n\tif (wc >= 0x100) {\n\t\tpb->buf[i++] = (BYTE)(wc >> 8); nc++;\n\t}\n\tpb->buf[i++] = (BYTE)wc;\n#endif\n\n#else\t\t\t\t\t\t\t/* ANSI/OEM input (without re-encoding) */\n\tpb->buf[i++] = (BYTE)c;\n#endif\n\n\tif (i >= (int)(sizeof pb->buf) - 4) {\t/* Write buffered characters to the file */\n\t\tf_write(pb->fp, pb->buf, (UINT)i, &n);\n\t\ti = (n == (UINT)i) ? 0 : -1;\n\t}\n\tpb->idx = i;\n\tpb->nchr = nc + 1;\n}", "docstring": "/* Buffered file write with code conversion */", "url": "https://github.com/toniebox-reverse-engineering/teddycloud/blob/83d3b29cbfc74e8f76f48f8782646c8e464055b2/fat/source/ff.c#L6578-L6704", "sha": "83d3b29cbfc74e8f76f48f8782646c8e464055b2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "gpio_7segment_disconnect_pin", "code": "void gpio_7segment_disconnect_pin(const GpioPin* pin) {\n furi_hal_gpio_init(pin, GpioModeOutputOpenDrain, GpioPullNo, GpioSpeedLow);\n furi_hal_gpio_write(pin, true);\n}", "docstring": "// Disconnects a GpioPin via OutputOpenDrive, PushPullNo, output true.\n// @pin pointer to GpioPin to disconnect.", "url": "https://github.com/jamisonderek/flipper-zero-tutorials/blob/89716a9b00eacce7055a75fddb5a3adde93f265f/gpio/gpio_7segment/gpio_7segment_app.c#L171-L174", "sha": "89716a9b00eacce7055a75fddb5a3adde93f265f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "rgbleds_reset", "code": "void rgbleds_reset(RgbLeds* leds) {\n for(int i = 0; i < leds->num_leds; i++) {\n leds->color[i] = 0x000000;\n }\n}", "docstring": "/**\n * @brief Resets the LEDs to their default color pattern (off).\n * @details This method resets the LEDs data to their default color pattern (off).\n * You must still call rgbleds_update to update the LEDs.\n * @param leds The RgbLeds struct to reset.\n*/", "url": "https://github.com/jamisonderek/flipper-zero-tutorials/blob/89716a9b00eacce7055a75fddb5a3adde93f265f/js/flipboard/modules/js_rgbleds/rgbleds.c#L52-L56", "sha": "89716a9b00eacce7055a75fddb5a3adde93f265f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ct_timer_interrupt", "code": "static void ct_timer_interrupt(void *data, unsigned int status)\n{\n\tstruct ct_timer *timer = data;\n\n\t/* Interval timer interrupt */\n\tif ((status & IT_INT) && timer->ops->interrupt)\n\t\ttimer->ops->interrupt(timer);\n}", "docstring": "/*\n * timer manager\n */", "url": "https://github.com/crazii/SBEMU/blob/f0cbde063396deb2c1246c9b3108a37755c68e9a/drivers/ctxfi/cttimer.c#L405-L412", "sha": "f0cbde063396deb2c1246c9b3108a37755c68e9a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "curlx_uztosi", "code": "int curlx_uztosi(size_t uznum)\n{\n#ifdef __INTEL_COMPILER\n# pragma warning(push)\n# pragma warning(disable:810) /* conversion may lose significant bits */\n#endif\n\n DEBUGASSERT(uznum <= (size_t) CURL_MASK_SINT);\n return (int)(uznum & (size_t) CURL_MASK_SINT);\n\n#ifdef __INTEL_COMPILER\n# pragma warning(pop)\n#endif\n}", "docstring": "/*\n** unsigned size_t to signed int\n*/", 
"url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/curl/lib/warnless.c#L181-L194", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Curl_ipvalid", "code": "bool Curl_ipvalid(struct connectdata *conn)\n{\n if(conn->ip_version == CURL_IPRESOLVE_V6)\n return Curl_ipv6works();\n return TRUE;\n}", "docstring": "/*\n * Curl_ipvalid() checks what CURL_IPRESOLVE_* requirements that might've\n * been set and returns TRUE if they are OK.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/curl/lib/hostip6.c#L126-L131", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Curl_infof", "code": "void Curl_infof(struct SessionHandle *data, const char *fmt, ...)\n{\n if(data && data->set.verbose) {\n va_list ap;\n size_t len;\n char print_buffer[2048 + 1];\n va_start(ap, fmt);\n vsnprintf(print_buffer, sizeof(print_buffer), fmt, ap);\n va_end(ap);\n len = strlen(print_buffer);\n Curl_debug(data, CURLINFO_TEXT, print_buffer, len, NULL);\n }\n}", "docstring": "/* CURL_DO_LINEEND_CONV */\n/* Curl_infof() is for info message along the way */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/curl/lib/sendf.c#L136-L148", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Curl_single_getsock", "code": "int Curl_single_getsock(const struct connectdata *conn,\n curl_socket_t *sock, /* points to numsocks number\n of sockets */\n int numsocks)\n{\n const struct SessionHandle *data = conn->data;\n int bitmap = GETSOCK_BLANK;\n unsigned sockindex = 0;\n\n if(conn->handler->perform_getsock)\n return conn->handler->perform_getsock(conn, sock, numsocks);\n\n if(numsocks < 2)\n /* simple check but we might need two slots */\n return GETSOCK_BLANK;\n\n /* don't include HOLD and PAUSE connections */\n if((data->req.keepon & KEEP_RECVBITS) == KEEP_RECV) {\n\n DEBUGASSERT(conn->sockfd != CURL_SOCKET_BAD);\n\n bitmap |= GETSOCK_READSOCK(sockindex);\n sock[sockindex] = conn->sockfd;\n }\n\n /* don't include HOLD and PAUSE connections */\n if((data->req.keepon & KEEP_SENDBITS) == KEEP_SEND) {\n\n if((conn->sockfd != conn->writesockfd) ||\n !(data->req.keepon & KEEP_RECV)) {\n /* only if they are not the same socket or we didn't have a readable\n one, we increase index */\n if(data->req.keepon & KEEP_RECV)\n sockindex++; /* increase index if we need two entries */\n\n DEBUGASSERT(conn->writesockfd != CURL_SOCKET_BAD);\n\n sock[sockindex] = conn->writesockfd;\n }\n\n bitmap |= GETSOCK_WRITESOCK(sockindex);\n }\n\n return bitmap;\n}", "docstring": "/*\n * Curl_single_getsock() gets called by the multi interface code when the app\n * has requested to get the sockets for the current connection. This function\n * will then be called once for every connection that the multi interface\n * keeps track of. 
This function will only be called for connections that are\n * in the proper state to have this information available.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/curl/lib/transfer.c#L1148-L1192", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Curl_close", "code": "CURLcode Curl_close(struct SessionHandle *data)\n{\n struct Curl_multi *m;\n\n if(!data)\n return CURLE_OK;\n\n Curl_expire(data, 0); /* shut off timers */\n\n m = data->multi;\n\n if(m)\n /* This handle is still part of a multi handle, take care of this first\n and detach this handle from there. */\n curl_multi_remove_handle(data->multi, data);\n\n if(data->multi_easy)\n /* when curl_easy_perform() is used, it creates its own multi handle to\n use and this is the one */\n curl_multi_cleanup(data->multi_easy);\n\n /* Destroy the timeout list that is held in the easy handle. It is\n /normally/ done by curl_multi_remove_handle() but this is \"just in\n case\" */\n if(data->state.timeoutlist) {\n Curl_llist_destroy(data->state.timeoutlist, NULL);\n data->state.timeoutlist = NULL;\n }\n\n data->magic = 0; /* force a clear AFTER the possibly enforced removal from\n the multi handle, since that function uses the magic\n field! */\n\n if(data->state.rangestringalloc)\n free(data->state.range);\n\n /* Free the pathbuffer */\n Curl_safefree(data->state.pathbuffer);\n data->state.path = NULL;\n\n Curl_safefree(data->state.proto.generic);\n\n /* Close down all open SSL info and sessions */\n Curl_ssl_close_all(data);\n Curl_safefree(data->state.first_host);\n Curl_safefree(data->state.scratch);\n Curl_ssl_free_certinfo(data);\n\n if(data->change.referer_alloc) {\n Curl_safefree(data->change.referer);\n data->change.referer_alloc = FALSE;\n }\n data->change.referer = NULL;\n\n if(data->change.url_alloc) {\n Curl_safefree(data->change.url);\n data->change.url_alloc = FALSE;\n }\n data->change.url = NULL;\n\n Curl_safefree(data->state.headerbuff);\n\n Curl_flush_cookies(data, 1);\n\n Curl_digest_cleanup(data);\n\n Curl_safefree(data->info.contenttype);\n Curl_safefree(data->info.wouldredirect);\n\n /* this destroys the channel and we cannot use it anymore after this */\n Curl_resolver_cleanup(data->state.resolver);\n\n Curl_convert_close(data);\n\n /* No longer a dirty share, if it exists */\n if(data->share) {\n Curl_share_lock(data, CURL_LOCK_DATA_SHARE, CURL_LOCK_ACCESS_SINGLE);\n data->share->dirty--;\n Curl_share_unlock(data, CURL_LOCK_DATA_SHARE);\n }\n\n Curl_freeset(data);\n free(data);\n return CURLE_OK;\n}", "docstring": "/*\n * This is the internal function curl_easy_cleanup() calls. This should\n * cleanup and free all resources associated with this sessionhandle.\n *\n * NOTE: if we ever add something that attempts to write to a socket or\n * similar here, we must ignore SIGPIPE first. 
It is currently only done\n * when curl_easy_perform() is invoked.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/curl/lib/url.c#L366-L450", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get_request", "code": "static int get_request(curl_socket_t sock, struct httprequest *req)\n{\n int error;\n int fail = 0;\n int done_processing = 0;\n char *reqbuf = req->reqbuf;\n ssize_t got = 0;\n\n char *pipereq = NULL;\n size_t pipereq_length = 0;\n\n if(req->pipelining) {\n pipereq = reqbuf + req->checkindex;\n pipereq_length = req->offset - req->checkindex;\n }\n\n /*** Init the httprequest structure properly for the upcoming request ***/\n\n req->checkindex = 0;\n req->offset = 0;\n req->testno = DOCNUMBER_NOTHING;\n req->partno = 0;\n req->open = TRUE;\n req->auth_req = FALSE;\n req->auth = FALSE;\n req->cl = 0;\n req->digest = FALSE;\n req->ntlm = FALSE;\n req->pipe = 0;\n req->skip = 0;\n req->rcmd = RCMD_NORMALREQ;\n req->protocol = RPROT_NONE;\n req->prot_version = 0;\n req->pipelining = FALSE;\n req->rtp_buffer = NULL;\n req->rtp_buffersize = 0;\n\n /*** end of httprequest init ***/\n\n while(!done_processing && (req->offset < REQBUFSIZ-1)) {\n if(pipereq_length && pipereq) {\n memmove(reqbuf, pipereq, pipereq_length);\n got = curlx_uztosz(pipereq_length);\n pipereq_length = 0;\n }\n else {\n if(req->skip)\n /* we are instructed to not read the entire thing, so we make sure to only\n read what we're supposed to and NOT read the enire thing the client\n wants to send! */\n got = sread(sock, reqbuf + req->offset, req->cl);\n else\n got = sread(sock, reqbuf + req->offset, REQBUFSIZ-1 - req->offset);\n }\n if(got_exit_signal)\n return 1;\n if(got == 0) {\n logmsg(\"Connection closed by client\");\n fail = 1;\n }\n else if(got < 0) {\n error = SOCKERRNO;\n logmsg(\"recv() returned error: (%d) %s\", error, strerror(error));\n fail = 1;\n }\n if(fail) {\n /* dump the request received so far to the external file */\n reqbuf[req->offset] = '\\0';\n storerequest(reqbuf, req->offset);\n return 1;\n }\n\n logmsg(\"Read %zd bytes\", got);\n\n req->offset += (size_t)got;\n reqbuf[req->offset] = '\\0';\n\n done_processing = ProcessRequest(req);\n if(got_exit_signal)\n return 1;\n if(done_processing && req->pipe) {\n logmsg(\"Waiting for another piped request\");\n done_processing = 0;\n req->pipe--;\n }\n }\n\n if((req->offset == REQBUFSIZ-1) && (got > 0)) {\n logmsg(\"Request would overflow buffer, closing connection\");\n /* dump request received so far to external file anyway */\n reqbuf[REQBUFSIZ-1] = '\\0';\n fail = 1;\n }\n else if(req->offset > REQBUFSIZ-1) {\n logmsg(\"Request buffer overflow, closing connection\");\n /* dump request received so far to external file anyway */\n reqbuf[REQBUFSIZ-1] = '\\0';\n fail = 1;\n }\n else\n reqbuf[req->offset] = '\\0';\n\n /* dump the request to an external file */\n storerequest(reqbuf, req->pipelining ? 
req->checkindex : req->offset);\n if(got_exit_signal)\n return 1;\n\n return fail; /* return 0 on success */\n}", "docstring": "/* return 0 on success, non-zero on failure */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/curl/tests/server/rtspd.c#L772-L880", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pkey_cb", "code": "static int pkey_cb(int operation, ASN1_VALUE **pval, const ASN1_ITEM *it,\n\t\t\t\t\t\t\tvoid *exarg)\n{\n\t/* Since the structure must still be valid use ASN1_OP_FREE_PRE */\n\tif(operation == ASN1_OP_FREE_PRE) {\n\t\tPKCS8_PRIV_KEY_INFO *key = (PKCS8_PRIV_KEY_INFO *)*pval;\n\t\tif (key->pkey->value.octet_string)\n\t\tOPENSSL_cleanse(key->pkey->value.octet_string->data,\n\t\t\tkey->pkey->value.octet_string->length);\n\t}\n\treturn 1;\n}", "docstring": "/* Minor tweak to operation: zero private key data */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/openssl/crypto/asn1/p8_pkey.c#L65-L76", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "asn1_do_lock", "code": "int asn1_do_lock(ASN1_VALUE **pval, int op, const ASN1_ITEM *it)\n\t{\n\tconst ASN1_AUX *aux;\n\tint *lck, ret;\n\tif ((it->itype != ASN1_ITYPE_SEQUENCE)\n\t && (it->itype != ASN1_ITYPE_NDEF_SEQUENCE))\n\t\treturn 0;\n\taux = it->funcs;\n\tif (!aux || !(aux->flags & ASN1_AFLG_REFCOUNT))\n\t\treturn 0;\n\tlck = offset2ptr(*pval, aux->ref_offset);\n\tif (op == 0)\n\t\t{\n\t\t*lck = 1;\n\t\treturn 1;\n\t\t}\n\tret = CRYPTO_add(lck, op, aux->ref_lock);\n#ifdef REF_PRINT\n\tfprintf(stderr, \"%s: Reference Count: %d\\n\", it->sname, *lck);\n#endif\n#ifdef REF_CHECK\n\tif (ret < 0) \n\t\tfprintf(stderr, \"%s, bad reference count\\n\", it->sname);\n#endif\n\treturn ret;\n\t}", "docstring": "/* Do reference counting. The value 'op' decides what to do. \n * if it is +1 then the count is incremented. If op is 0 count is\n * set to 1. If op is -1 count is decremented and the return value\n * is the current refrence count or 0 if no reference count exists.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/openssl/crypto/asn1/tasn_utl.c#L101-L126", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TS_RESP_CTX_cleanup", "code": "static void TS_RESP_CTX_cleanup(TS_RESP_CTX *ctx)\n\t{\n\tTS_REQ_free(ctx->request);\n\tctx->request = NULL;\n\tTS_RESP_free(ctx->response);\n\tctx->response = NULL;\n\tTS_TST_INFO_free(ctx->tst_info);\n\tctx->tst_info = NULL;\n\t}", "docstring": "/* Cleans up the variable part of the context. 
*/", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/openssl/crypto/ts/ts_rsp_sign.c#L495-L503", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "X509_STORE_CTX_trusted_stack", "code": "void X509_STORE_CTX_trusted_stack(X509_STORE_CTX *ctx, STACK_OF(X509) *sk)\n{\n\tctx->other_ctx = sk;\n\tctx->get_issuer = get_issuer_sk;\n}", "docstring": "/* Set alternative lookup method: just a STACK of trusted certificates.\n * This avoids X509_STORE nastiness where it isn't needed.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/openssl/crypto/x509/x509_vfy.c#L2128-L2132", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "app_info_free", "code": "static void app_info_free(APP_INFO *inf)\n\t{\n\tif (--(inf->references) <= 0)\n\t\t{\n\t\tif (inf->next != NULL)\n\t\t\t{\n\t\t\tapp_info_free(inf->next);\n\t\t\t}\n\t\tOPENSSL_free(inf);\n\t\t}\n\t}", "docstring": "/* Valid iff num_disable > 0.\n * CRYPTO_LOCK_MALLOC2 is locked\n * exactly in this case (by the\n * thread named in disabling_thread).\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/openssl_android/crypto/mem_dbg.c#L145-L155", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "asn1_collate_primitive", "code": "static int asn1_collate_primitive(ASN1_STRING *a, ASN1_const_CTX *c)\n\t{\n\tASN1_STRING *os=NULL;\n\tBUF_MEM b;\n\tint num;\n\n\tb.length=0;\n\tb.max=0;\n\tb.data=NULL;\n\n\tif (a == NULL)\n\t\t{\n\t\tc->error=ERR_R_PASSED_NULL_PARAMETER;\n\t\tgoto err;\n\t\t}\n\n\tnum=0;\n\tfor (;;)\n\t\t{\n\t\tif (c->inf & 1)\n\t\t\t{\n\t\t\tc->eos=ASN1_const_check_infinite_end(&c->p,\n\t\t\t\t(long)(c->max-c->p));\n\t\t\tif (c->eos) break;\n\t\t\t}\n\t\telse\n\t\t\t{\n\t\t\tif (c->slen <= 0) break;\n\t\t\t}\n\n\t\tc->q=c->p;\n\t\tif (d2i_ASN1_bytes(&os,&c->p,c->max-c->p,c->tag,c->xclass)\n\t\t\t== NULL)\n\t\t\t{\n\t\t\tc->error=ERR_R_ASN1_LIB;\n\t\t\tgoto err;\n\t\t\t}\n\n\t\tif (!BUF_MEM_grow_clean(&b,num+os->length))\n\t\t\t{\n\t\t\tc->error=ERR_R_BUF_LIB;\n\t\t\tgoto err;\n\t\t\t}\n\t\tmemcpy(&(b.data[num]),os->data,os->length);\n\t\tif (!(c->inf & 1))\n\t\t\tc->slen-=(c->p-c->q);\n\t\tnum+=os->length;\n\t\t}\n\n\tif (!asn1_const_Finish(c)) goto err;\n\n\ta->length=num;\n\tif (a->data != NULL) OPENSSL_free(a->data);\n\ta->data=(unsigned char *)b.data;\n\tif (os != NULL) ASN1_STRING_free(os);\n\treturn(1);\nerr:\n\tASN1err(ASN1_F_ASN1_COLLATE_PRIMITIVE,c->error);\n\tif (os != NULL) ASN1_STRING_free(os);\n\tif (b.data != NULL) OPENSSL_free(b.data);\n\treturn(0);\n\t}", "docstring": "/* We are about to parse 0..n d2i_ASN1_bytes objects, we are to collapse\n * them into the one structure that is then returned */\n/* There have been a few bug fixes for this function from\n * Paul Keogh , many thanks to him */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/openssl_android/crypto/asn1/a_bytes.c#L252-L313", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"BN_POOL_init", "code": "static void BN_POOL_init(BN_POOL *p)\n\t{\n\tp->head = p->current = p->tail = NULL;\n\tp->used = p->size = 0;\n\t}", "docstring": "/***********/\n/* BN_POOL */\n/***********/", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/openssl_android/crypto/bn/bn_ctx.c#L360-L364", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "dynamic_data_ctx_free_func", "code": "static void dynamic_data_ctx_free_func(void *parent, void *ptr,\n\t\t\tCRYPTO_EX_DATA *ad, int idx, long argl, void *argp)\n\t{\n\tif(ptr)\n\t\t{\n\t\tdynamic_data_ctx *ctx = (dynamic_data_ctx *)ptr;\n\t\tif(ctx->dynamic_dso)\n\t\t\tDSO_free(ctx->dynamic_dso);\n\t\tif(ctx->DYNAMIC_LIBNAME)\n\t\t\tOPENSSL_free((void*)ctx->DYNAMIC_LIBNAME);\n\t\tif(ctx->engine_id)\n\t\t\tOPENSSL_free((void*)ctx->engine_id);\n\t\tif(ctx->dirs)\n\t\t\tsk_pop_free(ctx->dirs, int_free_str);\n\t\tOPENSSL_free(ctx);\n\t\t}\n\t}", "docstring": "/* Because our ex_data element may or may not get allocated depending on whether\n * a \"first-use\" occurs before the ENGINE is freed, we have a memory leak\n * problem to solve. We can't declare a \"new\" handler for the ex_data as we\n * don't want a dynamic_data_ctx in *all* ENGINE structures of all types (this\n * is a bug in the design of CRYPTO_EX_DATA). As such, we just declare a \"free\"\n * handler and that will get called if an ENGINE is being destroyed and there\n * was an ex_data element corresponding to our context type. */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/openssl_android/crypto/engine/eng_dyn.c#L164-L180", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "png_destroy_write_struct", "code": "void PNGAPI\npng_destroy_write_struct(png_structpp png_ptr_ptr, png_infopp info_ptr_ptr)\n{\n png_debug(1, \"in png_destroy_write_struct\");\n\n if (png_ptr_ptr != NULL)\n {\n png_structrp png_ptr = *png_ptr_ptr;\n\n if (png_ptr != NULL) /* added in libpng 1.6.0 */\n {\n png_destroy_info_struct(png_ptr, info_ptr_ptr);\n\n *png_ptr_ptr = NULL;\n png_write_destroy(png_ptr);\n png_destroy_png_struct(png_ptr);\n }\n }\n}", "docstring": "/* Free all memory used by the write.\n * In libpng 1.6.0 this API changed quietly to no longer accept a NULL value for\n * *png_ptr_ptr. Prior to 1.6.0 it would accept such a value and it would free\n * the passed in info_structs but it would quietly fail to free any of the data\n * inside them. 
In 1.6.0 it quietly does nothing (it has to be quiet because it\n * has no png_ptr.)\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/png/pngwrite.c#L900-L918", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "png_write_pHYs", "code": "void /* PRIVATE */\npng_write_pHYs(png_structrp png_ptr, png_uint_32 x_pixels_per_unit,\n png_uint_32 y_pixels_per_unit,\n int unit_type)\n{\n png_byte buf[9];\n\n png_debug(1, \"in png_write_pHYs\");\n\n if (unit_type >= PNG_RESOLUTION_LAST)\n png_warning(png_ptr, \"Unrecognized unit type for pHYs chunk\");\n\n png_save_uint_32(buf, x_pixels_per_unit);\n png_save_uint_32(buf + 4, y_pixels_per_unit);\n buf[8] = (png_byte)unit_type;\n\n png_write_complete_chunk(png_ptr, png_pHYs, buf, (png_size_t)9);\n}", "docstring": "/* Write the pHYs chunk */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/png/pngwutil.c#L1887-L1904", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "writepng_encode_image", "code": "int writepng_encode_image(mainprog_info *mainprog_ptr)\n{\n png_structp png_ptr = (png_structp)mainprog_ptr->png_ptr;\n png_infop info_ptr = (png_infop)mainprog_ptr->info_ptr;\n\n\n /* as always, setjmp() must be called in every function that calls a\n * PNG-writing libpng function */\n\n if (setjmp(mainprog_ptr->jmpbuf)) {\n png_destroy_write_struct(&png_ptr, &info_ptr);\n mainprog_ptr->png_ptr = NULL;\n mainprog_ptr->info_ptr = NULL;\n return 2;\n }\n\n\n /* and now we just write the whole image; libpng takes care of interlacing\n * for us */\n\n png_write_image(png_ptr, mainprog_ptr->row_pointers);\n\n\n /* since that's it, we also close out the end of the PNG file now--if we\n * had any text or time info to write after the IDATs, second argument\n * would be info_ptr, but we optimize slightly by sending NULL pointer: */\n\n png_write_end(png_ptr, NULL);\n\n return 0;\n}", "docstring": "/* returns 0 for success, 2 for libpng (longjmp) problem */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/png/contrib/gregbook/writepng.c#L259-L289", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "gpc_sRGB", "code": "static void\ngpc_sRGB(Pixel *out, const Pixel *in, const Background *back)\n{\n (void)back;\n\n out->r = isRGB(in->r);\n\n if (in->g == in->r)\n {\n out->g = out->r;\n\n if (in->b == in->r)\n out->b = out->r;\n\n else\n out->b = isRGB(in->b);\n }\n\n else\n {\n out->g = isRGB(in->g);\n\n if (in->b == in->r)\n out->b = out->r;\n\n else if (in->b == in->g)\n out->b = out->g;\n\n else\n out->b = isRGB(in->b);\n }\n\n out->a = 255;\n}", "docstring": "/* 16-bit to 8-bit conversions */\n/* sRGB: convert linear components to sRGB, alpha := 255 */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/png/contrib/libtests/pngstest.c#L1598-L1631", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "is_leap", "code": "static int\nis_leap(int year)\n{\n /* Cast year to unsigned. 
The result is the same either way, but\n * C can generate faster code for unsigned mod than for signed\n * mod (especially for % 4 -- a good compiler should just grab\n * the last 2 bits when the LHS is unsigned).\n */\n const unsigned int ayear = (unsigned int)year;\n return ayear % 4 == 0 && (ayear % 100 != 0 || ayear % 400 == 0);\n}", "docstring": "/* year -> 1 if leap year, else 0. */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/python/Modules/datetimemodule.c#L176-L186", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "move_finalizers", "code": "static void\nmove_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers)\n{\n PyGC_Head *gc;\n PyGC_Head *next;\n\n /* March over unreachable. Move objects with finalizers into\n * `finalizers`.\n */\n for (gc = unreachable->gc.gc_next; gc != unreachable; gc = next) {\n PyObject *op = FROM_GC(gc);\n\n assert(IS_TENTATIVELY_UNREACHABLE(op));\n next = gc->gc.gc_next;\n\n if (has_finalizer(op)) {\n gc_list_move(gc, finalizers);\n gc->gc.gc_refs = GC_REACHABLE;\n }\n }\n}", "docstring": "/* Move the objects in unreachable with __del__ methods into `finalizers`.\n * Objects moved into `finalizers` have gc_refs set to GC_REACHABLE; the\n * objects remaining in unreachable are left at GC_TENTATIVELY_UNREACHABLE.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/python/Modules/gcmodule.c#L536-L556", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sha_transform", "code": "static void\nsha_transform(SHAobject *sha_info)\n{\n int i;\n SHA_INT32 T, A, B, C, D, E, W[80], *WP;\n\n memcpy(W, sha_info->data, sizeof(sha_info->data));\n longReverse(W, (int)sizeof(sha_info->data), sha_info->Endianness);\n\n for (i = 16; i < 80; ++i) {\n W[i] = W[i-3] ^ W[i-8] ^ W[i-14] ^ W[i-16];\n\n /* extra rotation fix */\n W[i] = R32(W[i], 1);\n }\n A = sha_info->digest[0];\n B = sha_info->digest[1];\n C = sha_info->digest[2];\n D = sha_info->digest[3];\n E = sha_info->digest[4];\n WP = W;\n#ifdef UNRAVEL\n FA(1); FB(1); FC(1); FD(1); FE(1); FT(1); FA(1); FB(1); FC(1); FD(1);\n FE(1); FT(1); FA(1); FB(1); FC(1); FD(1); FE(1); FT(1); FA(1); FB(1);\n FC(2); FD(2); FE(2); FT(2); FA(2); FB(2); FC(2); FD(2); FE(2); FT(2);\n FA(2); FB(2); FC(2); FD(2); FE(2); FT(2); FA(2); FB(2); FC(2); FD(2);\n FE(3); FT(3); FA(3); FB(3); FC(3); FD(3); FE(3); FT(3); FA(3); FB(3);\n FC(3); FD(3); FE(3); FT(3); FA(3); FB(3); FC(3); FD(3); FE(3); FT(3);\n FA(4); FB(4); FC(4); FD(4); FE(4); FT(4); FA(4); FB(4); FC(4); FD(4);\n FE(4); FT(4); FA(4); FB(4); FC(4); FD(4); FE(4); FT(4); FA(4); FB(4);\n sha_info->digest[0] += E;\n sha_info->digest[1] += T;\n sha_info->digest[2] += A;\n sha_info->digest[3] += B;\n sha_info->digest[4] += C;\n#else /* !UNRAVEL */\n#ifdef UNROLL_LOOPS\n FG(1); FG(1); FG(1); FG(1); FG(1); FG(1); FG(1); FG(1); FG(1); FG(1);\n FG(1); FG(1); FG(1); FG(1); FG(1); FG(1); FG(1); FG(1); FG(1); FG(1);\n FG(2); FG(2); FG(2); FG(2); FG(2); FG(2); FG(2); FG(2); FG(2); FG(2);\n FG(2); FG(2); FG(2); FG(2); FG(2); FG(2); FG(2); FG(2); FG(2); FG(2);\n FG(3); FG(3); FG(3); FG(3); FG(3); FG(3); FG(3); FG(3); FG(3); FG(3);\n FG(3); FG(3); FG(3); FG(3); FG(3); FG(3); FG(3); FG(3); FG(3); FG(3);\n FG(4); FG(4); FG(4); FG(4); FG(4); FG(4); 
FG(4); FG(4); FG(4); FG(4);\n FG(4); FG(4); FG(4); FG(4); FG(4); FG(4); FG(4); FG(4); FG(4); FG(4);\n#else /* !UNROLL_LOOPS */\n for (i = 0; i < 20; ++i) { FG(1); }\n for (i = 20; i < 40; ++i) { FG(2); }\n for (i = 40; i < 60; ++i) { FG(3); }\n for (i = 60; i < 80; ++i) { FG(4); }\n#endif /* !UNROLL_LOOPS */\n sha_info->digest[0] += A;\n sha_info->digest[1] += B;\n sha_info->digest[2] += C;\n sha_info->digest[3] += D;\n sha_info->digest[4] += E;\n#endif /* !UNRAVEL */\n}", "docstring": "/* do SHA transformation */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/python/Modules/shamodule.c#L160-L217", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "deflate_slow", "code": "local block_state deflate_slow(s, flush)\n deflate_state *s;\n int flush;\n{\n IPos hash_head = NIL; /* head of hash chain */\n int bflush; /* set if current block must be flushed */\n\n /* Process the input block. */\n for (;;) {\n /* Make sure that we always have enough lookahead, except\n * at the end of the input file. We need MAX_MATCH bytes\n * for the next match, plus MIN_MATCH bytes to insert the\n * string following the next match.\n */\n if (s->lookahead < MIN_LOOKAHEAD) {\n fill_window(s);\n if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {\n return need_more;\n }\n if (s->lookahead == 0) break; /* flush the current block */\n }\n\n /* Insert the string window[strstart .. strstart+2] in the\n * dictionary, and set hash_head to the head of the hash chain:\n */\n if (s->lookahead >= MIN_MATCH) {\n INSERT_STRING(s, s->strstart, hash_head);\n }\n\n /* Find the longest match, discarding those <= prev_length.\n */\n s->prev_length = s->match_length, s->prev_match = s->match_start;\n s->match_length = MIN_MATCH-1;\n\n if (hash_head != NIL && s->prev_length < s->max_lazy_match &&\n s->strstart - hash_head <= MAX_DIST(s)) {\n /* To simplify the code, we prevent matches with the string\n * of window index 0 (in particular we have to avoid a match\n * of the string with itself at the start of the input file).\n */\n if (s->strategy != Z_HUFFMAN_ONLY && s->strategy != Z_RLE) {\n s->match_length = longest_match (s, hash_head);\n } else if (s->strategy == Z_RLE && s->strstart - hash_head == 1) {\n s->match_length = longest_match_fast (s, hash_head);\n }\n /* longest_match() or longest_match_fast() sets match_start */\n\n if (s->match_length <= 5 && (s->strategy == Z_FILTERED\n#if TOO_FAR <= 32767\n || (s->match_length == MIN_MATCH &&\n s->strstart - s->match_start > TOO_FAR)\n#endif\n )) {\n\n /* If prev_match is also MIN_MATCH, match_start is garbage\n * but we will ignore the current match anyway.\n */\n s->match_length = MIN_MATCH-1;\n }\n }\n /* If there was a match at the previous step and the current\n * match is not better, output the previous match:\n */\n if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {\n uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;\n /* Do not insert strings in hash table beyond this. */\n\n check_match(s, s->strstart-1, s->prev_match, s->prev_length);\n\n _tr_tally_dist(s, s->strstart -1 - s->prev_match,\n s->prev_length - MIN_MATCH, bflush);\n\n /* Insert in hash table all strings up to the end of the match.\n * strstart-1 and strstart are already inserted. 
If there is not\n * enough lookahead, the last two strings are not inserted in\n * the hash table.\n */\n s->lookahead -= s->prev_length-1;\n s->prev_length -= 2;\n do {\n if (++s->strstart <= max_insert) {\n INSERT_STRING(s, s->strstart, hash_head);\n }\n } while (--s->prev_length != 0);\n s->match_available = 0;\n s->match_length = MIN_MATCH-1;\n s->strstart++;\n\n if (bflush) FLUSH_BLOCK(s, 0);\n\n } else if (s->match_available) {\n /* If there was no match at the previous position, output a\n * single literal. If there was a match but the current match\n * is longer, truncate the previous match to a single literal.\n */\n Tracevv((stderr,\"%c\", s->window[s->strstart-1]));\n _tr_tally_lit(s, s->window[s->strstart-1], bflush);\n if (bflush) {\n FLUSH_BLOCK_ONLY(s, 0);\n }\n s->strstart++;\n s->lookahead--;\n if (s->strm->avail_out == 0) return need_more;\n } else {\n /* There is no previous match to compare with, wait for\n * the next step to decide.\n */\n s->match_available = 1;\n s->strstart++;\n s->lookahead--;\n }\n }\n Assert (flush != Z_NO_FLUSH, \"no flush?\");\n if (s->match_available) {\n Tracevv((stderr,\"%c\", s->window[s->strstart-1]));\n _tr_tally_lit(s, s->window[s->strstart-1], bflush);\n s->match_available = 0;\n }\n FLUSH_BLOCK(s, flush == Z_FINISH);\n return flush == Z_FINISH ? finish_done : block_done;\n}", "docstring": "/* ===========================================================================\n * Same as above, but achieves better compression. We use a lazy\n * evaluation for matches: a match is finally adopted only if there is\n * no better match at the next window position.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/python/Modules/zlib/deflate.c#L1554-L1674", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "int_print", "code": "static int\nint_print(PyIntObject *v, FILE *fp, int flags)\n /* flags -- not used but required by interface */\n{\n long int_val = v->ob_ival;\n Py_BEGIN_ALLOW_THREADS\n fprintf(fp, \"%ld\", int_val);\n Py_END_ALLOW_THREADS\n return 0;\n}", "docstring": "/* ARGSUSED */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/python/Objects/intobject.c#L442-L451", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "out_of_memory", "code": "LOCAL(void)\nout_of_memory (j_common_ptr cinfo, int which)\n/* Report an out-of-memory error and stop execution */\n/* If we compiled MEM_STATS support, report alloc requests before dying */\n{\n#ifdef MEM_STATS\n cinfo->err->trace_level = 2;\t/* force self_destruct to report stats */\n#endif\n ERREXIT1(cinfo, JERR_OUT_OF_MEMORY, which);\n}", "docstring": "/* MEM_STATS */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/python_modules/tools/jpeg-6b/jmemmgr.c#L216-L225", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "checkTreePage", "code": "static int checkTreePage(\n IntegrityCk *pCheck, /* Context for the sanity check */\n int iPage, /* Page number of the page to check */\n char *zParentContext, /* Parent context */\n i64 *pnParentMinKey, \n i64 *pnParentMaxKey\n){\n MemPage 
*pPage;\n int i, rc, depth, d2, pgno, cnt;\n int hdr, cellStart;\n int nCell;\n u8 *data;\n BtShared *pBt;\n int usableSize;\n char zContext[100];\n char *hit = 0;\n i64 nMinKey = 0;\n i64 nMaxKey = 0;\n\n sqlite3_snprintf(sizeof(zContext), zContext, \"Page %d: \", iPage);\n\n /* Check that the page exists\n */\n pBt = pCheck->pBt;\n usableSize = pBt->usableSize;\n if( iPage==0 ) return 0;\n if( checkRef(pCheck, iPage, zParentContext) ) return 0;\n if( (rc = btreeGetPage(pBt, (Pgno)iPage, &pPage, 0))!=0 ){\n checkAppendMsg(pCheck, zContext,\n \"unable to get the page. error code=%d\", rc);\n return 0;\n }\n\n /* Clear MemPage.isInit to make sure the corruption detection code in\n ** btreeInitPage() is executed. */\n pPage->isInit = 0;\n if( (rc = btreeInitPage(pPage))!=0 ){\n assert( rc==SQLITE_CORRUPT ); /* The only possible error from InitPage */\n checkAppendMsg(pCheck, zContext, \n \"btreeInitPage() returns error code %d\", rc);\n releasePage(pPage);\n return 0;\n }\n\n /* Check out all the cells.\n */\n depth = 0;\n for(i=0; i<pPage->nCell && pCheck->mxErr; i++){\n u8 *pCell;\n u32 sz;\n CellInfo info;\n\n /* Check payload overflow pages\n */\n sqlite3_snprintf(sizeof(zContext), zContext,\n \"On tree page %d cell %d: \", iPage, i);\n pCell = findCell(pPage,i);\n btreeParseCellPtr(pPage, pCell, &info);\n sz = info.nData;\n if( !pPage->intKey ) sz += (int)info.nKey;\n /* For intKey pages, check that the keys are in order.\n */\n else if( i==0 ) nMinKey = nMaxKey = info.nKey;\n else{\n if( info.nKey <= nMaxKey ){\n checkAppendMsg(pCheck, zContext, \n \"Rowid %lld out of order (previous was %lld)\", info.nKey, nMaxKey);\n }\n nMaxKey = info.nKey;\n }\n assert( sz==info.nPayload );\n if( (sz>info.nLocal) \n && (&pCell[info.iOverflow]<=&pPage->aData[pBt->usableSize])\n ){\n int nPage = (sz - info.nLocal + usableSize - 5)/(usableSize - 4);\n Pgno pgnoOvfl = get4byte(&pCell[info.iOverflow]);\n#ifndef SQLITE_OMIT_AUTOVACUUM\n if( pBt->autoVacuum ){\n checkPtrmap(pCheck, pgnoOvfl, PTRMAP_OVERFLOW1, iPage, zContext);\n }\n#endif\n checkList(pCheck, 0, pgnoOvfl, nPage, zContext);\n }\n\n /* Check sanity of left child page.\n */\n if( !pPage->leaf ){\n pgno = get4byte(pCell);\n#ifndef SQLITE_OMIT_AUTOVACUUM\n if( pBt->autoVacuum ){\n checkPtrmap(pCheck, pgno, PTRMAP_BTREE, iPage, zContext);\n }\n#endif\n d2 = checkTreePage(pCheck, pgno, zContext, &nMinKey, i==0 ? NULL : &nMaxKey);\n if( i>0 && d2!=depth ){\n checkAppendMsg(pCheck, zContext, \"Child page depth differs\");\n }\n depth = d2;\n }\n }\n\n if( !pPage->leaf ){\n pgno = get4byte(&pPage->aData[pPage->hdrOffset+8]);\n sqlite3_snprintf(sizeof(zContext), zContext, \n \"On page %d at right child: \", iPage);\n#ifndef SQLITE_OMIT_AUTOVACUUM\n if( pBt->autoVacuum ){\n checkPtrmap(pCheck, pgno, PTRMAP_BTREE, iPage, zContext);\n }\n#endif\n checkTreePage(pCheck, pgno, zContext, NULL, !pPage->nCell ? 
NULL : &nMaxKey);\n }\n \n /* For intKey leaf pages, check that the min/max keys are in order\n ** with any left/parent/right pages.\n */\n if( pPage->leaf && pPage->intKey ){\n /* if we are a left child page */\n if( pnParentMinKey ){\n /* if we are the left most child page */\n if( !pnParentMaxKey ){\n if( nMaxKey > *pnParentMinKey ){\n checkAppendMsg(pCheck, zContext, \n \"Rowid %lld out of order (max larger than parent min of %lld)\",\n nMaxKey, *pnParentMinKey);\n }\n }else{\n if( nMinKey <= *pnParentMinKey ){\n checkAppendMsg(pCheck, zContext, \n \"Rowid %lld out of order (min less than parent min of %lld)\",\n nMinKey, *pnParentMinKey);\n }\n if( nMaxKey > *pnParentMaxKey ){\n checkAppendMsg(pCheck, zContext, \n \"Rowid %lld out of order (max larger than parent max of %lld)\",\n nMaxKey, *pnParentMaxKey);\n }\n *pnParentMinKey = nMaxKey;\n }\n /* else if we're a right child page */\n } else if( pnParentMaxKey ){\n if( nMinKey <= *pnParentMaxKey ){\n checkAppendMsg(pCheck, zContext, \n \"Rowid %lld out of order (min less than parent max of %lld)\",\n nMinKey, *pnParentMaxKey);\n }\n }\n }\n\n /* Check for complete coverage of the page\n */\n data = pPage->aData;\n hdr = pPage->hdrOffset;\n hit = sqlite3PageMalloc( pBt->pageSize );\n if( hit==0 ){\n pCheck->mallocFailed = 1;\n }else{\n u16 contentOffset = get2byte(&data[hdr+5]);\n assert( contentOffset<=usableSize ); /* Enforced by btreeInitPage() */\n memset(hit+contentOffset, 0, usableSize-contentOffset);\n memset(hit, 1, contentOffset);\n nCell = get2byte(&data[hdr+3]);\n cellStart = hdr + 12 - 4*pPage->leaf;\n for(i=0; i<nCell; i++){\n int pc = get2byte(&data[cellStart+i*2]);\n u32 size = 65536;\n int j;\n if( pc<=usableSize-4 ){\n size = cellSizePtr(pPage, &data[pc]);\n }\n if( (int)(pc+size-1)>=usableSize ){\n checkAppendMsg(pCheck, 0, \n \"Corruption detected in cell %d on page %d\",i,iPage);\n }else{\n for(j=pc+size-1; j>=pc; j--) hit[j]++;\n }\n }\n i = get2byte(&data[hdr+1]);\n while( i>0 ){\n int size, j;\n assert( i<=usableSize-4 ); /* Enforced by btreeInitPage() */\n size = get2byte(&data[i+2]);\n assert( i+size<=usableSize ); /* Enforced by btreeInitPage() */\n for(j=i+size-1; j>=i; j--) hit[j]++;\n j = get2byte(&data[i]);\n assert( j==0 || j>i+size ); /* Enforced by btreeInitPage() */\n assert( j<=usableSize-4 ); /* Enforced by btreeInitPage() */\n i = j;\n }\n for(i=cnt=0; i<usableSize; i++){\n if( hit[i]==0 ){\n cnt++;\n }else if( hit[i]>1 ){\n checkAppendMsg(pCheck, 0,\n \"Multiple uses for byte %d of page %d\", i, iPage);\n break;\n }\n }\n if( cnt!=data[hdr+7] ){\n checkAppendMsg(pCheck, 0, \n \"Fragmentation of %d bytes reported as %d on page %d\",\n cnt, data[hdr+7], iPage);\n }\n }\n sqlite3PageFree(hit);\n releasePage(pPage);\n return depth+1;\n}", "docstring": "/*\n** Do various sanity checks on a single page of a tree. Return\n** the tree depth. Root pages return 0. Parents of root pages\n** return 1, and so forth.\n** \n** These checks are done:\n**\n** 1. Make sure that cells and freeblocks do not overlap\n** but combine to completely cover the page.\n** NO 2. Make sure cell keys are in order.\n** NO 3. Make sure no key is less than or equal to zLowerBound.\n** NO 4. Make sure no key is greater than or equal to zUpperBound.\n** 5. Check the integrity of overflow pages.\n** 6. Recursively call checkTreePage on all children.\n** 7. Verify that the depth of all children is the same.\n** 8. 
Make sure this page is at least 33% full or else it is\n** the root of the tree.\n*/", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/sqlite/sqlite3.c#L45990-L46197", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "removeFromBlockedList", "code": "static void removeFromBlockedList(sqlite3 *db){\n sqlite3 **pp;\n assertMutexHeld();\n for(pp=&sqlite3BlockedList; *pp; pp = &(*pp)->pNextBlocked){\n if( *pp==db ){\n *pp = (*pp)->pNextBlocked;\n break;\n }\n }\n}", "docstring": "/*\n** Remove connection db from the blocked connections list. If connection\n** db is not currently a part of the list, this function is a no-op.\n*/", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/sqlite/sqlite3.c#L98900-L98909", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fts3OffsetsFunc", "code": "static void fts3OffsetsFunc(\n sqlite3_context *pContext, /* SQLite function call context */\n int nVal, /* Size of argument array */\n sqlite3_value **apVal /* Array of arguments */\n){\n Fts3Cursor *pCsr; /* Cursor handle passed through apVal[0] */\n\n UNUSED_PARAMETER(nVal);\n\n assert( nVal==1 );\n if( fts3FunctionArg(pContext, \"offsets\", apVal[0], &pCsr) ) return;\n assert( pCsr );\n if( SQLITE_OK==fts3CursorSeek(pContext, pCsr) ){\n sqlite3Fts3Offsets(pContext, pCsr);\n }\n}", "docstring": "/*\n** Implementation of the offsets() function for FTS3\n*/", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/sqlite/sqlite3.c#L102042-L102057", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "deflatePending", "code": "int ZEXPORT deflatePending (strm, pending, bits)\n unsigned *pending;\n int *bits;\n z_streamp strm;\n{\n if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;\n if (pending != Z_NULL)\n *pending = strm->state->pending;\n if (bits != Z_NULL)\n *bits = strm->state->bi_valid;\n return Z_OK;\n}", "docstring": "/* ========================================================================= */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/zip/deflate.c#L450-L461", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "gen_bitlen", "code": "local void gen_bitlen(s, desc)\n deflate_state *s;\n tree_desc *desc; /* the tree descriptor */\n{\n ct_data *tree = desc->dyn_tree;\n int max_code = desc->max_code;\n const ct_data *stree = desc->stat_desc->static_tree;\n const intf *extra = desc->stat_desc->extra_bits;\n int base = desc->stat_desc->extra_base;\n int max_length = desc->stat_desc->max_length;\n int h; /* heap index */\n int n, m; /* iterate over the tree elements */\n int bits; /* bit length */\n int xbits; /* extra bits */\n ush f; /* frequency */\n int overflow = 0; /* number of elements with bit length too large */\n\n for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;\n\n /* In a first pass, compute the optimal bit lengths (which may\n * overflow in the case of the bit length tree).\n */\n 
tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */\n\n for (h = s->heap_max+1; h < HEAP_SIZE; h++) {\n n = s->heap[h];\n bits = tree[tree[n].Dad].Len + 1;\n if (bits > max_length) bits = max_length, overflow++;\n tree[n].Len = (ush)bits;\n /* We overwrite tree[n].Dad which is no longer needed */\n\n if (n > max_code) continue; /* not a leaf node */\n\n s->bl_count[bits]++;\n xbits = 0;\n if (n >= base) xbits = extra[n-base];\n f = tree[n].Freq;\n s->opt_len += (ulg)f * (bits + xbits);\n if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);\n }\n if (overflow == 0) return;\n\n Trace((stderr,\"\\nbit length overflow\\n\"));\n /* This happens for example on obj2 and pic of the Calgary corpus */\n\n /* Find the first bit length which could increase: */\n do {\n bits = max_length-1;\n while (s->bl_count[bits] == 0) bits--;\n s->bl_count[bits]--; /* move one leaf down the tree */\n s->bl_count[bits+1] += 2; /* move one overflow item as its brother */\n s->bl_count[max_length]--;\n /* The brother of the overflow item also moves one step up,\n * but this does not affect bl_count[max_length]\n */\n overflow -= 2;\n } while (overflow > 0);\n\n /* Now recompute all bit lengths, scanning in increasing frequency.\n * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all\n * lengths instead of fixing only the wrong ones. This idea is taken\n * from 'ar' written by Haruhiko Okumura.)\n */\n for (bits = max_length; bits != 0; bits--) {\n n = s->bl_count[bits];\n while (n != 0) {\n m = s->heap[--h];\n if (m > max_code) continue;\n if ((unsigned) tree[m].Len != (unsigned) bits) {\n Trace((stderr,\"code %d bits %d->%d\\n\", m, tree[m].Len, bits));\n s->opt_len += ((long)bits - (long)tree[m].Len)\n *(long)tree[m].Freq;\n tree[m].Len = (ush)bits;\n }\n n--;\n }\n }\n}", "docstring": "/* ===========================================================================\n * Compute the optimal bit lengths for a tree and update the total bit length\n * for the current block.\n * IN assertion: the fields freq and dad are set, heap[heap_max] and\n * above are the tree nodes sorted by increasing frequency.\n * OUT assertions: the field len is set to the optimal bit length, the\n * array bl_count contains the frequencies for each bit length.\n * The length opt_len is updated; static_len is also updated if stree is\n * not null.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/zip/trees.c#L488-L565", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "inflate_table9", "code": "int inflate_table9(type, lens, codes, table, bits, work)\ncodetype type;\nunsigned short FAR *lens;\nunsigned codes;\ncode FAR * FAR *table;\nunsigned FAR *bits;\nunsigned short FAR *work;\n{\n unsigned len; /* a code's length in bits */\n unsigned sym; /* index of code symbols */\n unsigned min, max; /* minimum and maximum code lengths */\n unsigned root; /* number of index bits for root table */\n unsigned curr; /* number of index bits for current table */\n unsigned drop; /* code bits to drop for sub-table */\n int left; /* number of prefix codes available */\n unsigned used; /* code entries in table used */\n unsigned huff; /* Huffman code */\n unsigned incr; /* for incrementing code, index */\n unsigned fill; /* index for replicating entries */\n unsigned low; /* low bits for current root entry */\n unsigned mask; /* mask for low root bits */\n 
code this; /* table entry for duplication */\n code FAR *next; /* next available space in table */\n const unsigned short FAR *base; /* base value table to use */\n const unsigned short FAR *extra; /* extra bits table to use */\n int end; /* use base and extra for symbol > end */\n unsigned short count[MAXBITS+1]; /* number of codes of each length */\n unsigned short offs[MAXBITS+1]; /* offsets in table for each length */\n static const unsigned short lbase[31] = { /* Length codes 257..285 base */\n 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17,\n 19, 23, 27, 31, 35, 43, 51, 59, 67, 83, 99, 115,\n 131, 163, 195, 227, 3, 0, 0};\n static const unsigned short lext[31] = { /* Length codes 257..285 extra */\n 128, 128, 128, 128, 128, 128, 128, 128, 129, 129, 129, 129,\n 130, 130, 130, 130, 131, 131, 131, 131, 132, 132, 132, 132,\n 133, 133, 133, 133, 144, 72, 78};\n static const unsigned short dbase[32] = { /* Distance codes 0..31 base */\n 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49,\n 65, 97, 129, 193, 257, 385, 513, 769, 1025, 1537, 2049, 3073,\n 4097, 6145, 8193, 12289, 16385, 24577, 32769, 49153};\n static const unsigned short dext[32] = { /* Distance codes 0..31 extra */\n 128, 128, 128, 128, 129, 129, 130, 130, 131, 131, 132, 132,\n 133, 133, 134, 134, 135, 135, 136, 136, 137, 137, 138, 138,\n 139, 139, 140, 140, 141, 141, 142, 142};\n\n /*\n Process a set of code lengths to create a canonical Huffman code. The\n code lengths are lens[0..codes-1]. Each length corresponds to the\n symbols 0..codes-1. The Huffman code is generated by first sorting the\n symbols by length from short to long, and retaining the symbol order\n for codes with equal lengths. Then the code starts with all zero bits\n for the first code of the shortest length, and the codes are integer\n increments for the same length, and zeros are appended as the length\n increases. For the deflate format, these bits are stored backwards\n from their more natural integer increment ordering, and so when the\n decoding tables are built in the large loop below, the integer codes\n are incremented backwards.\n\n This routine assumes, but does not check, that all of the entries in\n lens[] are in the range 0..MAXBITS. The caller must assure this.\n 1..MAXBITS is interpreted as that code length. zero means that that\n symbol does not occur in this code.\n\n The codes are sorted by computing a count of codes for each length,\n creating from that a table of starting indices for each length in the\n sorted table, and then entering the symbols in order in the sorted\n table. The sorted table is work[], with that space being provided by\n the caller.\n\n The length counts are used for other purposes as well, i.e. finding\n the minimum and maximum length codes, determining if there are any\n codes at all, checking for a valid set of lengths, and looking ahead\n at length counts to determine sub-table sizes when building the\n decoding tables.\n */\n\n /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */\n for (len = 0; len <= MAXBITS; len++)\n count[len] = 0;\n for (sym = 0; sym < codes; sym++)\n count[lens[sym]]++;\n\n /* bound code lengths, force root to be within code lengths */\n root = *bits;\n for (max = MAXBITS; max >= 1; max--)\n if (count[max] != 0) break;\n if (root > max) root = max;\n if (max == 0) return -1; /* no codes! 
*/\n for (min = 1; min <= MAXBITS; min++)\n if (count[min] != 0) break;\n if (root < min) root = min;\n\n /* check for an over-subscribed or incomplete set of lengths */\n left = 1;\n for (len = 1; len <= MAXBITS; len++) {\n left <<= 1;\n left -= count[len];\n if (left < 0) return -1; /* over-subscribed */\n }\n if (left > 0 && (type == CODES || max != 1))\n return -1; /* incomplete set */\n\n /* generate offsets into symbol table for each length for sorting */\n offs[1] = 0;\n for (len = 1; len < MAXBITS; len++)\n offs[len + 1] = offs[len] + count[len];\n\n /* sort symbols by length, by symbol order within each length */\n for (sym = 0; sym < codes; sym++)\n if (lens[sym] != 0) work[offs[lens[sym]]++] = (unsigned short)sym;\n\n /*\n Create and fill in decoding tables. In this loop, the table being\n filled is at next and has curr index bits. The code being used is huff\n with length len. That code is converted to an index by dropping drop\n bits off of the bottom. For codes where len is less than drop + curr,\n those top drop + curr - len bits are incremented through all values to\n fill the table with replicated entries.\n\n root is the number of index bits for the root table. When len exceeds\n root, sub-tables are created pointed to by the root entry with an index\n of the low root bits of huff. This is saved in low to check for when a\n new sub-table should be started. drop is zero when the root table is\n being filled, and drop is root when sub-tables are being filled.\n\n When a new sub-table is needed, it is necessary to look ahead in the\n code lengths to determine what size sub-table is needed. The length\n counts are used for this, and so count[] is decremented as codes are\n entered in the tables.\n\n used keeps track of how many table entries have been allocated from the\n provided *table space. It is checked for LENS and DIST tables against\n the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in\n the initial root table size constants. See the comments in inftree9.h\n for more information.\n\n sym increments through all symbols, and the loop terminates when\n all codes of length max, i.e. all codes, have been processed. 
This\n routine permits incomplete codes, so another loop after this one fills\n in the rest of the decoding tables with invalid code markers.\n */\n\n /* set up for code type */\n switch (type) {\n case CODES:\n base = extra = work; /* dummy value--not used */\n end = 19;\n break;\n case LENS:\n base = lbase;\n base -= 257;\n extra = lext;\n extra -= 257;\n end = 256;\n break;\n default: /* DISTS */\n base = dbase;\n extra = dext;\n end = -1;\n }\n\n /* initialize state for loop */\n huff = 0; /* starting code */\n sym = 0; /* starting code symbol */\n len = min; /* starting code length */\n next = *table; /* current table to fill in */\n curr = root; /* current table index bits */\n drop = 0; /* current bits to drop from code for index */\n low = (unsigned)(-1); /* trigger new sub-table when len > root */\n used = 1U << root; /* use root table entries */\n mask = used - 1; /* mask for comparing low */\n\n /* check available table space */\n if ((type == LENS && used >= ENOUGH_LENS) ||\n (type == DISTS && used >= ENOUGH_DISTS))\n return 1;\n\n /* process all codes and make table entries */\n for (;;) {\n /* create table entry */\n this.bits = (unsigned char)(len - drop);\n if ((int)(work[sym]) < end) {\n this.op = (unsigned char)0;\n this.val = work[sym];\n }\n else if ((int)(work[sym]) > end) {\n this.op = (unsigned char)(extra[work[sym]]);\n this.val = base[work[sym]];\n }\n else {\n this.op = (unsigned char)(32 + 64); /* end of block */\n this.val = 0;\n }\n\n /* replicate for those indices with low len bits equal to huff */\n incr = 1U << (len - drop);\n fill = 1U << curr;\n do {\n fill -= incr;\n next[(huff >> drop) + fill] = this;\n } while (fill != 0);\n\n /* backwards increment the len-bit code huff */\n incr = 1U << (len - 1);\n while (huff & incr)\n incr >>= 1;\n if (incr != 0) {\n huff &= incr - 1;\n huff += incr;\n }\n else\n huff = 0;\n\n /* go to next symbol, update count, len */\n sym++;\n if (--(count[len]) == 0) {\n if (len == max) break;\n len = lens[work[sym]];\n }\n\n /* create new sub-table if needed */\n if (len > root && (huff & mask) != low) {\n /* if first time, transition to sub-tables */\n if (drop == 0)\n drop = root;\n\n /* increment past last table */\n next += 1U << curr;\n\n /* determine length of next table */\n curr = len - drop;\n left = (int)(1 << curr);\n while (curr + drop < max) {\n left -= count[curr + drop];\n if (left <= 0) break;\n curr++;\n left <<= 1;\n }\n\n /* check for enough space */\n used += 1U << curr;\n if ((type == LENS && used >= ENOUGH_LENS) ||\n (type == DISTS && used >= ENOUGH_DISTS))\n return 1;\n\n /* point entry in root table to sub-table */\n low = huff & mask;\n (*table)[low].op = (unsigned char)curr;\n (*table)[low].bits = (unsigned char)root;\n (*table)[low].val = (unsigned short)(next - *table);\n }\n }\n\n /*\n Fill in rest of table for incomplete codes. This loop is similar to the\n loop above in incrementing huff for table indices. It is assumed that\n len is equal to curr + drop, so there is no loop needed to increment\n through high index bits. 
When the current sub-table is filled, the loop\n drops back to the root table to fill in any remaining entries there.\n */\n this.op = (unsigned char)64; /* invalid code marker */\n this.bits = (unsigned char)(len - drop);\n this.val = (unsigned short)0;\n while (huff != 0) {\n /* when done with sub-table, drop back to root table */\n if (drop != 0 && (huff & mask) != low) {\n drop = 0;\n len = root;\n next = *table;\n curr = root;\n this.bits = (unsigned char)len;\n }\n\n /* put invalid code marker in table */\n next[huff >> drop] = this;\n\n /* backwards increment the len-bit code huff */\n incr = 1U << (len - 1);\n while (huff & incr)\n incr >>= 1;\n if (incr != 0) {\n huff &= incr - 1;\n huff += incr;\n }\n else\n huff = 0;\n }\n\n /* set return parameters */\n *table += used;\n *bits = root;\n return 0;\n}", "docstring": "/*\n If you use the zlib library in a product, an acknowledgment is welcome\n in the documentation of your product. If for some reason you cannot\n include such an acknowledgment, I would appreciate that you keep this\n copyright string in the executable of your product.\n */\n/*\n Build a set of tables to decode the provided canonical Huffman code.\n The code lengths are lens[0..codes-1]. The result starts at *table,\n whose indices are 0..2^bits-1. work is a writable array of at least\n lens shorts, which is used as a work area. type is the type of code\n to be generated, CODES, LENS, or DISTS. On return, zero is success,\n -1 is an invalid code, and +1 means that ENOUGH isn't enough. table\n on return points to the next available entry's address. bits is the\n requested root table index bits, and on return it is the actual root\n table index bits. It will differ if the request is greater than the\n longest code or if it is less than the shortest code.\n */", "url": "https://github.com/v2v3v4/BigWorld-Engine-14.4.1/blob/4389085c8ce35cff887a4cc18fc47d1133d89ffb/programming/bigworld/third_party/zip/contrib/infback9/inftree9.c#L32-L324", "sha": "4389085c8ce35cff887a4cc18fc47d1133d89ffb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_conv_direct", "code": "static int _conv_direct(float *vArr1,int length1,float *vArr2,int length2, \n\t\t\t\tConvModeType mode,float *vArr3){\n\tint len=0;\n\tint start=0;\n\n\tif(mode==ConvMode_Full){\n\t\tlen=length1+length2-1;\n\t\tstart=-length2+1;\n\t\tfor(int i=start,n=0;i=0;j--,k++){\n\t\t\t\tfloat _value=0;\n\n\t\t\t\tif(k>=0&&k=0;j--,k++){\n\t\t\t\tfloat _value=0;\n\n\t\t\t\tif(k>=0&&klinecount ; i++)\n {\n\tcheck = sec->lines[i];\n\tother = getNextSector(check,sec);\n\n\tif (!other)\n\t continue;\n\n\tif (other->ceilingheight < height)\n\t height = other->ceilingheight;\n }\n return height;\n}", "docstring": "//\n// FIND LOWEST CEILING IN THE SURROUNDING SECTORS\n//", "url": "https://github.com/zserge/fenster/blob/e700581dfb7956dd161aee44fc0cff0663e789a1/examples/doom-c/p_spec.c#L391-L411", "sha": "e700581dfb7956dd161aee44fc0cff0663e789a1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "unixOpen", "code": "static int unixOpen(\n sqlite3_vfs *pVfs, /* The VFS for which this is the xOpen method */\n const char *zPath, /* Pathname of file to be opened */\n sqlite3_file *pFile, /* The file descriptor to be filled in */\n int flags, /* Input flags to control the opening */\n int *pOutFlags /* Output flags returned to SQLite core */\n){\n unixFile *p = (unixFile *)pFile;\n int fd = -1; /* File descriptor returned by open() */\n int openFlags = 0; /* Flags to 
pass to open() */\n int eType = flags&0x0FFF00; /* Type of file to open */\n int noLock; /* True to omit locking primitives */\n int rc = SQLITE_OK; /* Function Return Code */\n int ctrlFlags = 0; /* UNIXFILE_* flags */\n\n int isExclusive = (flags & SQLITE_OPEN_EXCLUSIVE);\n int isDelete = (flags & SQLITE_OPEN_DELETEONCLOSE);\n int isCreate = (flags & SQLITE_OPEN_CREATE);\n int isReadonly = (flags & SQLITE_OPEN_READONLY);\n int isReadWrite = (flags & SQLITE_OPEN_READWRITE);\n#if SQLITE_ENABLE_LOCKING_STYLE\n int isAutoProxy = (flags & SQLITE_OPEN_AUTOPROXY);\n#endif\n#if defined(__APPLE__) || SQLITE_ENABLE_LOCKING_STYLE\n struct statfs fsInfo;\n#endif\n\n /* If creating a super- or main-file journal, this function will open\n ** a file-descriptor on the directory too. The first time unixSync()\n ** is called the directory file descriptor will be fsync()ed and close()d.\n */\n int isNewJrnl = (isCreate && (\n eType==SQLITE_OPEN_SUPER_JOURNAL\n || eType==SQLITE_OPEN_MAIN_JOURNAL\n || eType==SQLITE_OPEN_WAL\n ));\n\n /* If argument zPath is a NULL pointer, this function is required to open\n ** a temporary file. Use this buffer to store the file name in.\n */\n char zTmpname[MAX_PATHNAME+2];\n const char *zName = zPath;\n\n /* Check the following statements are true:\n **\n ** (a) Exactly one of the READWRITE and READONLY flags must be set, and\n ** (b) if CREATE is set, then READWRITE must also be set, and\n ** (c) if EXCLUSIVE is set, then CREATE must also be set.\n ** (d) if DELETEONCLOSE is set, then CREATE must also be set.\n */\n assert((isReadonly==0 || isReadWrite==0) && (isReadWrite || isReadonly));\n assert(isCreate==0 || isReadWrite);\n assert(isExclusive==0 || isCreate);\n assert(isDelete==0 || isCreate);\n\n /* The main DB, main journal, WAL file and super-journal are never\n ** automatically deleted. Nor are they ever temporary files. */\n assert( (!isDelete && zName) || eType!=SQLITE_OPEN_MAIN_DB );\n assert( (!isDelete && zName) || eType!=SQLITE_OPEN_MAIN_JOURNAL );\n assert( (!isDelete && zName) || eType!=SQLITE_OPEN_SUPER_JOURNAL );\n assert( (!isDelete && zName) || eType!=SQLITE_OPEN_WAL );\n\n /* Assert that the upper layer has set one of the \"file-type\" flags. */\n assert( eType==SQLITE_OPEN_MAIN_DB || eType==SQLITE_OPEN_TEMP_DB\n || eType==SQLITE_OPEN_MAIN_JOURNAL || eType==SQLITE_OPEN_TEMP_JOURNAL\n || eType==SQLITE_OPEN_SUBJOURNAL || eType==SQLITE_OPEN_SUPER_JOURNAL\n || eType==SQLITE_OPEN_TRANSIENT_DB || eType==SQLITE_OPEN_WAL\n );\n\n /* Detect a pid change and reset the PRNG. There is a race condition\n ** here such that two or more threads all trying to open databases at\n ** the same instant might all reset the PRNG. But multiple resets\n ** are harmless.\n */\n if( randomnessPid!=osGetpid(0) ){\n randomnessPid = osGetpid(0);\n sqlite3_randomness(0,0);\n }\n memset(p, 0, sizeof(unixFile));\n\n#ifdef SQLITE_ASSERT_NO_FILES\n /* Applications that never read or write a persistent disk files */\n assert( zName==0 );\n#endif\n\n if( eType==SQLITE_OPEN_MAIN_DB ){\n UnixUnusedFd *pUnused;\n pUnused = findReusableFd(zName, flags);\n if( pUnused ){\n fd = pUnused->fd;\n }else{\n pUnused = sqlite3_malloc64(sizeof(*pUnused));\n if( !pUnused ){\n return SQLITE_NOMEM_BKPT;\n }\n }\n p->pPreallocatedUnused = pUnused;\n\n /* Database filenames are double-zero terminated if they are not\n ** URIs with parameters. Hence, they can always be passed into\n ** sqlite3_uri_parameter(). 
*/\n assert( (flags & SQLITE_OPEN_URI) || zName[strlen(zName)+1]==0 );\n\n }else if( !zName ){\n /* If zName is NULL, the upper layer is requesting a temp file. */\n assert(isDelete && !isNewJrnl);\n rc = unixGetTempname(pVfs->mxPathname, zTmpname);\n if( rc!=SQLITE_OK ){\n return rc;\n }\n zName = zTmpname;\n\n /* Generated temporary filenames are always double-zero terminated\n ** for use by sqlite3_uri_parameter(). */\n assert( zName[strlen(zName)+1]==0 );\n }\n\n /* Determine the value of the flags parameter passed to POSIX function\n ** open(). These must be calculated even if open() is not called, as\n ** they may be stored as part of the file handle and used by the\n ** 'conch file' locking functions later on. */\n if( isReadonly ) openFlags |= O_RDONLY;\n if( isReadWrite ) openFlags |= O_RDWR;\n if( isCreate ) openFlags |= O_CREAT;\n if( isExclusive ) openFlags |= (O_EXCL|O_NOFOLLOW);\n openFlags |= (O_LARGEFILE|O_BINARY|O_NOFOLLOW);\n\n if( fd<0 ){\n mode_t openMode; /* Permissions to create file with */\n uid_t uid; /* Userid for the file */\n gid_t gid; /* Groupid for the file */\n rc = findCreateFileMode(zName, flags, &openMode, &uid, &gid);\n if( rc!=SQLITE_OK ){\n assert( !p->pPreallocatedUnused );\n assert( eType==SQLITE_OPEN_WAL || eType==SQLITE_OPEN_MAIN_JOURNAL );\n return rc;\n }\n fd = robust_open(zName, openFlags, openMode);\n OSTRACE((\"OPENX %-3d %s 0%o\\n\", fd, zName, openFlags));\n assert( !isExclusive || (openFlags & O_CREAT)!=0 );\n if( fd<0 ){\n if( isNewJrnl && errno==EACCES && osAccess(zName, F_OK) ){\n /* If unable to create a journal because the directory is not\n ** writable, change the error code to indicate that. */\n rc = SQLITE_READONLY_DIRECTORY;\n }else if( errno!=EISDIR && isReadWrite ){\n /* Failed to open the file for read/write access. Try read-only. */\n UnixUnusedFd *pReadonly = 0;\n flags &= ~(SQLITE_OPEN_READWRITE|SQLITE_OPEN_CREATE);\n openFlags &= ~(O_RDWR|O_CREAT);\n flags |= SQLITE_OPEN_READONLY;\n openFlags |= O_RDONLY;\n isReadonly = 1;\n pReadonly = findReusableFd(zName, flags);\n if( pReadonly ){\n fd = pReadonly->fd;\n sqlite3_free(pReadonly);\n }else{\n fd = robust_open(zName, openFlags, openMode);\n }\n }\n }\n if( fd<0 ){\n int rc2 = unixLogError(SQLITE_CANTOPEN_BKPT, \"open\", zName);\n if( rc==SQLITE_OK ) rc = rc2;\n goto open_finished;\n }\n\n /* The owner of the rollback journal or WAL file should always be the\n ** same as the owner of the database file. Try to ensure that this is\n ** the case. The chown() system call will be a no-op if the current\n ** process lacks root privileges, be we should at least try. 
Without\n ** this step, if a root process opens a database file, it can leave\n ** behinds a journal/WAL that is owned by root and hence make the\n ** database inaccessible to unprivileged processes.\n **\n ** If openMode==0, then that means uid and gid are not set correctly\n ** (probably because SQLite is configured to use 8+3 filename mode) and\n ** in that case we do not want to attempt the chown().\n */\n if( openMode && (flags & (SQLITE_OPEN_WAL|SQLITE_OPEN_MAIN_JOURNAL))!=0 ){\n robustFchown(fd, uid, gid);\n }\n }\n assert( fd>=0 );\n if( pOutFlags ){\n *pOutFlags = flags;\n }\n\n if( p->pPreallocatedUnused ){\n p->pPreallocatedUnused->fd = fd;\n p->pPreallocatedUnused->flags =\n flags & (SQLITE_OPEN_READONLY|SQLITE_OPEN_READWRITE);\n }\n\n if( isDelete ){\n#if OS_VXWORKS\n zPath = zName;\n#elif defined(SQLITE_UNLINK_AFTER_CLOSE)\n zPath = sqlite3_mprintf(\"%s\", zName);\n if( zPath==0 ){\n robust_close(p, fd, __LINE__);\n return SQLITE_NOMEM_BKPT;\n }\n#else\n osUnlink(zName);\n#endif\n }\n#if SQLITE_ENABLE_LOCKING_STYLE\n else{\n p->openFlags = openFlags;\n }\n#endif\n\n#if defined(__APPLE__) || SQLITE_ENABLE_LOCKING_STYLE\n if( fstatfs(fd, &fsInfo) == -1 ){\n storeLastErrno(p, errno);\n robust_close(p, fd, __LINE__);\n return SQLITE_IOERR_ACCESS;\n }\n if (0 == strncmp(\"msdos\", fsInfo.f_fstypename, 5)) {\n ((unixFile*)pFile)->fsFlags |= SQLITE_FSFLAGS_IS_MSDOS;\n }\n if (0 == strncmp(\"exfat\", fsInfo.f_fstypename, 5)) {\n ((unixFile*)pFile)->fsFlags |= SQLITE_FSFLAGS_IS_MSDOS;\n }\n#endif\n\n /* Set up appropriate ctrlFlags */\n if( isDelete ) ctrlFlags |= UNIXFILE_DELETE;\n if( isReadonly ) ctrlFlags |= UNIXFILE_RDONLY;\n noLock = eType!=SQLITE_OPEN_MAIN_DB;\n if( noLock ) ctrlFlags |= UNIXFILE_NOLOCK;\n if( isNewJrnl ) ctrlFlags |= UNIXFILE_DIRSYNC;\n if( flags & SQLITE_OPEN_URI ) ctrlFlags |= UNIXFILE_URI;\n\n#if SQLITE_ENABLE_LOCKING_STYLE\n#if SQLITE_PREFER_PROXY_LOCKING\n isAutoProxy = 1;\n#endif\n if( isAutoProxy && (zPath!=NULL) && (!noLock) && pVfs->xOpen ){\n char *envforce = getenv(\"SQLITE_FORCE_PROXY_LOCKING\");\n int useProxy = 0;\n\n /* SQLITE_FORCE_PROXY_LOCKING==1 means force always use proxy, 0 means\n ** never use proxy, NULL means use proxy for non-local files only. */\n if( envforce!=NULL ){\n useProxy = atoi(envforce)>0;\n }else{\n useProxy = !(fsInfo.f_flags&MNT_LOCAL);\n }\n if( useProxy ){\n rc = fillInUnixFile(pVfs, fd, pFile, zPath, ctrlFlags);\n if( rc==SQLITE_OK ){\n rc = proxyTransformUnixFile((unixFile*)pFile, \":auto:\");\n if( rc!=SQLITE_OK ){\n /* Use unixClose to clean up the resources added in fillInUnixFile\n ** and clear all the structure's references. 
Specifically,\n ** pFile->pMethods will be NULL so sqlite3OsClose will be a no-op\n */\n unixClose(pFile);\n return rc;\n }\n }\n goto open_finished;\n }\n }\n#endif\n\n assert( zPath==0 || zPath[0]=='/'\n || eType==SQLITE_OPEN_SUPER_JOURNAL || eType==SQLITE_OPEN_MAIN_JOURNAL\n );\n rc = fillInUnixFile(pVfs, fd, pFile, zPath, ctrlFlags);\n\nopen_finished:\n if( rc!=SQLITE_OK ){\n sqlite3_free(p->pPreallocatedUnused);\n }\n return rc;\n}", "docstring": "/*\n** Open the file zPath.\n**\n** Previously, the SQLite OS layer used three functions in place of this\n** one:\n**\n** sqlite3OsOpenReadWrite();\n** sqlite3OsOpenReadOnly();\n** sqlite3OsOpenExclusive();\n**\n** These calls correspond to the following combinations of flags:\n**\n** ReadWrite() -> (READWRITE | CREATE)\n** ReadOnly() -> (READONLY)\n** OpenExclusive() -> (READWRITE | CREATE | EXCLUSIVE)\n**\n** The old OpenExclusive() accepted a boolean argument - \"delFlag\". If\n** true, the file was configured to be automatically deleted when the\n** file handle closed. To achieve the same effect using this new\n** interface, add the DELETEONCLOSE flag to those specified above for\n** OpenExclusive().\n*/", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c#L44613-L44892", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3PagerSetSpillsize", "code": "SQLITE_PRIVATE int sqlite3PagerSetSpillsize(Pager *pPager, int mxPage){\n return sqlite3PcacheSetSpillsize(pPager->pPCache, mxPage);\n}", "docstring": "/*\n** Change the maximum number of in-memory pages that are allowed\n** before attempting to spill pages to journal.\n*/", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c#L60624-L60626", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3PagerSetFlags", "code": "SQLITE_PRIVATE void sqlite3PagerSetFlags(\n Pager *pPager, /* The pager to set safety level for */\n unsigned pgFlags /* Various flags */\n){\n unsigned level = pgFlags & PAGER_SYNCHRONOUS_MASK;\n if( pPager->tempFile ){\n pPager->noSync = 1;\n pPager->fullSync = 0;\n pPager->extraSync = 0;\n }else{\n pPager->noSync = level==PAGER_SYNCHRONOUS_OFF ?1:0;\n pPager->fullSync = level>=PAGER_SYNCHRONOUS_FULL ?1:0;\n pPager->extraSync = level==PAGER_SYNCHRONOUS_EXTRA ?1:0;\n }\n if( pPager->noSync ){\n pPager->syncFlags = 0;\n }else if( pgFlags & PAGER_FULLFSYNC ){\n pPager->syncFlags = SQLITE_SYNC_FULL;\n }else{\n pPager->syncFlags = SQLITE_SYNC_NORMAL;\n }\n pPager->walSyncFlags = (pPager->syncFlags<<2);\n if( pPager->fullSync ){\n pPager->walSyncFlags |= pPager->syncFlags;\n }\n if( (pgFlags & PAGER_CKPT_FULLFSYNC) && !pPager->noSync ){\n pPager->walSyncFlags |= (SQLITE_SYNC_FULL<<2);\n }\n if( pgFlags & PAGER_CACHESPILL ){\n pPager->doNotSpill &= ~SPILLFLAG_OFF;\n }else{\n pPager->doNotSpill |= SPILLFLAG_OFF;\n }\n}", "docstring": "/*\n** Adjust settings of the pager to those specified in the pgFlags parameter.\n**\n** The \"level\" in pgFlags & PAGER_SYNCHRONOUS_MASK sets the robustness\n** of the database to damage due to OS crashes or power failures by\n** changing the number of syncs()s when writing the journals.\n** There are four levels:\n**\n** OFF sqlite3OsSync() is never called. 
This is the default\n** for temporary and transient files.\n**\n** NORMAL The journal is synced once before writes begin on the\n** database. This is normally adequate protection, but\n** it is theoretically possible, though very unlikely,\n** that an inopertune power failure could leave the journal\n** in a state which would cause damage to the database\n** when it is rolled back.\n**\n** FULL The journal is synced twice before writes begin on the\n** database (with some additional information - the nRec field\n** of the journal header - being written in between the two\n** syncs). If we assume that writing a\n** single disk sector is atomic, then this mode provides\n** assurance that the journal will not be corrupted to the\n** point of causing damage to the database during rollback.\n**\n** EXTRA This is like FULL except that is also syncs the directory\n** that contains the rollback journal after the rollback\n** journal is unlinked.\n**\n** The above is for a rollback-journal mode. For WAL mode, OFF continues\n** to mean that no syncs ever occur. NORMAL means that the WAL is synced\n** prior to the start of checkpoint and that the database file is synced\n** at the conclusion of the checkpoint if the entire content of the WAL\n** was written back into the database. But no sync operations occur for\n** an ordinary commit in NORMAL mode with WAL. FULL means that the WAL\n** file is synced following each commit operation, in addition to the\n** syncs associated with NORMAL. There is no difference between FULL\n** and EXTRA for WAL mode.\n**\n** Do not confuse synchronous=FULL with SQLITE_SYNC_FULL. The\n** SQLITE_SYNC_FULL macro means to use the MacOSX-style full-fsync\n** using fcntl(F_FULLFSYNC). SQLITE_SYNC_NORMAL means to do an\n** ordinary fsync() call. There is no difference between SQLITE_SYNC_FULL\n** and SQLITE_SYNC_NORMAL on platforms other than MacOSX. But the\n** synchronous=FULL versus synchronous=NORMAL setting determines when\n** the xSync primitive is called and is relevant to all platforms.\n**\n** Numeric values associated with these states are OFF==1, NORMAL=2,\n** and FULL=3.\n*/", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c#L60710-L60743", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3WalWriteLock", "code": "SQLITE_PRIVATE int sqlite3WalWriteLock(Wal *pWal, int bLock){\n int rc = SQLITE_OK;\n assert( pWal->readLock<0 || bLock==0 );\n if( bLock ){\n assert( pWal->db );\n if( walEnableBlocking(pWal) ){\n rc = walLockExclusive(pWal, WAL_WRITE_LOCK, 1);\n if( rc==SQLITE_OK ){\n pWal->writeLock = 1;\n }\n walDisableBlocking(pWal);\n }\n }else if( pWal->writeLock ){\n walUnlockExclusive(pWal, WAL_WRITE_LOCK, 1);\n pWal->writeLock = 0;\n }\n return rc;\n}", "docstring": "/*\n** If parameter bLock is true, attempt to enable blocking locks, take\n** the WRITER lock, and then disable blocking locks. If blocking locks\n** cannot be enabled, no attempt to obtain the WRITER lock is made. Return\n** an SQLite error code if an error occurs, or SQLITE_OK otherwise. 
It is not\n** an error if blocking locks can not be enabled.\n**\n** If the bLock parameter is false and the WRITER lock is held, release it.\n*/", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c#L66966-L66983", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3VdbeAssertMayAbort", "code": "SQLITE_PRIVATE int sqlite3VdbeAssertMayAbort(Vdbe *v, int mayAbort){\n int hasAbort = 0;\n int hasFkCounter = 0;\n int hasCreateTable = 0;\n int hasCreateIndex = 0;\n int hasInitCoroutine = 0;\n Op *pOp;\n VdbeOpIter sIter;\n\n if( v==0 ) return 0;\n memset(&sIter, 0, sizeof(sIter));\n sIter.v = v;\n\n while( (pOp = opIterNext(&sIter))!=0 ){\n int opcode = pOp->opcode;\n if( opcode==OP_Destroy || opcode==OP_VUpdate || opcode==OP_VRename\n || opcode==OP_VDestroy\n || opcode==OP_VCreate\n || opcode==OP_ParseSchema\n || opcode==OP_Function || opcode==OP_PureFunc\n || ((opcode==OP_Halt || opcode==OP_HaltIfNull)\n && ((pOp->p1)!=SQLITE_OK && pOp->p2==OE_Abort))\n ){\n hasAbort = 1;\n break;\n }\n if( opcode==OP_CreateBtree && pOp->p3==BTREE_INTKEY ) hasCreateTable = 1;\n if( mayAbort ){\n /* hasCreateIndex may also be set for some DELETE statements that use\n ** OP_Clear. So this routine may end up returning true in the case\n ** where a \"DELETE FROM tbl\" has a statement-journal but does not\n ** require one. This is not so bad - it is an inefficiency, not a bug. */\n if( opcode==OP_CreateBtree && pOp->p3==BTREE_BLOBKEY ) hasCreateIndex = 1;\n if( opcode==OP_Clear ) hasCreateIndex = 1;\n }\n if( opcode==OP_InitCoroutine ) hasInitCoroutine = 1;\n#ifndef SQLITE_OMIT_FOREIGN_KEY\n if( opcode==OP_FkCounter && pOp->p1==0 && pOp->p2==1 ){\n hasFkCounter = 1;\n }\n#endif\n }\n sqlite3DbFree(v->db, sIter.apSub);\n\n /* Return true if hasAbort==mayAbort. Or if a malloc failure occurred.\n ** If malloc failed, then the while() loop above may not have iterated\n ** through all opcodes and hasAbort may be set incorrectly. Return\n ** true for this case to prevent the assert() in the callers frame\n ** from failing. */\n return ( v->db->mallocFailed || hasAbort==mayAbort || hasFkCounter\n || (hasCreateTable && hasInitCoroutine) || hasCreateIndex\n );\n}", "docstring": "/*\n** Check if the program stored in the VM associated with pParse may\n** throw an ABORT exception (causing the statement, but not entire transaction\n** to be rolled back). This condition is true if the main program or any\n** sub-programs contains any of the following:\n**\n** * OP_Halt with P1=SQLITE_CONSTRAINT and P2=OE_Abort.\n** * OP_HaltIfNull with P1=SQLITE_CONSTRAINT and P2=OE_Abort.\n** * OP_Destroy\n** * OP_VUpdate\n** * OP_VCreate\n** * OP_VRename\n** * OP_FkCounter with P2==0 (immediate foreign key constraint)\n** * OP_CreateBtree/BTREE_INTKEY and OP_InitCoroutine\n** (for CREATE TABLE AS SELECT ...)\n**\n** Then check that the value of Parse.mayAbort is true if an\n** ABORT may be thrown, or false otherwise. Return true if it does\n** match, or false otherwise. This function is intended to be used as\n** part of an assert statement in the compiler. 
Similar to:\n**\n** assert( sqlite3VdbeAssertMayAbort(pParse->pVdbe, pParse->mayAbort) );\n*/", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c#L85609-L85661", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sessionStat1Sql", "code": "static int sessionStat1Sql(sqlite3 *db, SessionApplyCtx *p){\n int rc = sessionSelectRow(db, \"sqlite_stat1\", p);\n if( rc==SQLITE_OK ){\n rc = sessionPrepare(db, &p->pInsert,\n \"INSERT INTO main.sqlite_stat1 VALUES(?1, \"\n \"CASE WHEN length(?2)=0 AND typeof(?2)='blob' THEN NULL ELSE ?2 END, \"\n \"?3)\"\n );\n }\n if( rc==SQLITE_OK ){\n rc = sessionPrepare(db, &p->pDelete,\n \"DELETE FROM main.sqlite_stat1 WHERE tbl=?1 AND idx IS \"\n \"CASE WHEN length(?2)=0 AND typeof(?2)='blob' THEN NULL ELSE ?2 END \"\n \"AND (?4 OR stat IS ?3)\"\n );\n }\n return rc;\n}", "docstring": "/*\n** Prepare statements for applying changes to the sqlite_stat1 table.\n** These are similar to those created by sessionSelectRow(),\n** sessionInsertRow(), sessionUpdateRow() and sessionDeleteRow() for\n** other tables.\n*/", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c#L229415-L229432", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fts5structEofMethod", "code": "static int fts5structEofMethod(sqlite3_vtab_cursor *cur){\n Fts5StructVcsr *pCsr = (Fts5StructVcsr*)cur;\n return pCsr->pStruct==0;\n}", "docstring": "/*\n** Return TRUE if the cursor has been moved off of the last\n** row of output.\n*/", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/github.com/mattn/go-sqlite3/sqlite3-binding.c#L249422-L249425", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ZSTD_findFrameCompressedSize", "code": "size_t ZSTD_findFrameCompressedSize(const void *src, size_t srcSize)\n{\n return ZSTD_findFrameCompressedSize_advanced(src, srcSize, ZSTD_f_zstd1);\n}", "docstring": "/** ZSTD_findFrameCompressedSize() :\n * See docs in zstd.h\n * Note: compatible with legacy mode */", "url": "https://github.com/schombert/Project-Alice/blob/ad0c795edf7aab314439dbf68afca7a9a4c24bfb/src/zstd/decompress/zstd_decompress.c#L807-L810", "sha": "ad0c795edf7aab314439dbf68afca7a9a4c24bfb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ZSTD_resetDStream", "code": "size_t ZSTD_resetDStream(ZSTD_DStream* dctx)\n{\n DEBUGLOG(4, \"ZSTD_resetDStream\");\n FORWARD_IF_ERROR(ZSTD_DCtx_reset(dctx, ZSTD_reset_session_only), \"\");\n return ZSTD_startingInputLength(dctx->format);\n}", "docstring": "/* ZSTD_resetDStream() :\n * return : expected size, aka ZSTD_startingInputLength().\n * this function cannot fail */", "url": "https://github.com/schombert/Project-Alice/blob/ad0c795edf7aab314439dbf68afca7a9a4c24bfb/src/zstd/decompress/zstd_decompress.c#L1770-L1775", "sha": "ad0c795edf7aab314439dbf68afca7a9a4c24bfb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "COVER_best_destroy", "code": "void COVER_best_destroy(COVER_best_t *best) {\n if (!best) {\n return;\n }\n COVER_best_wait(best);\n if (best->dict) {\n free(best->dict);\n }\n 
ZSTD_pthread_mutex_destroy(&best->mutex);\n ZSTD_pthread_cond_destroy(&best->cond);\n}", "docstring": "/**\n * Call COVER_best_wait() and then destroy the COVER_best_t.\n */", "url": "https://github.com/schombert/Project-Alice/blob/ad0c795edf7aab314439dbf68afca7a9a4c24bfb/src/zstd/dictBuilder/cover.c#L882-L892", "sha": "ad0c795edf7aab314439dbf68afca7a9a4c24bfb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fts_palloc", "code": "static int\nfts_palloc(FTS *sp, size_t size)\n{\n\tchar *new;\n\n\t_DIAGASSERT(sp != NULL);\n\n#ifdef __FTS_COMPAT_LENGTH\n\t/* Protect against fts_pathlen overflow. */\n\tif (size > USHRT_MAX + 1) {\n\t\terrno = ENAMETOOLONG;\n\t\treturn (1);\n\t}\n#endif\n\tsize = fts_pow2(size);\n\tnew = realloc(sp->fts_path, size);\n\tif (new == 0)\n\t\treturn (1);\n\tsp->fts_path = new;\n\tsp->fts_pathlen = fts_pathlen_truncate(size);\n\treturn (0);\n}", "docstring": "/*\n * Allow essentially unlimited paths; find, rm, ls should all work on any tree.\n * Most systems will allow creation of paths much longer than MAXPATHLEN, even\n * though the kernel won't resolve them. Round up the new size to a power of 2,\n * so we don't realloc the path 2 bytes at a time.\n */", "url": "https://github.com/rsms/llvmbox/blob/45b9a5a62f556bf8ec92899c18091109a14bbe2d/musl-fts/fts.c#L1153-L1174", "sha": "45b9a5a62f556bf8ec92899c18091109a14bbe2d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__eqsf2", "code": "int __eqsf2(fp_t a, fp_t b) { return __lesf2(a, b); }", "docstring": "// The alias mechanism doesn't work on Windows except for MinGW, so emit\n// wrapper functions.", "url": "https://github.com/rsms/llvmbox/blob/45b9a5a62f556bf8ec92899c18091109a14bbe2d/sysroots/compiler-rt/builtins/comparesf2.c#L73-L73", "sha": "45b9a5a62f556bf8ec92899c18091109a14bbe2d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "csinhl", "code": "long double complex csinhl(long double complex z)\n{\n\treturn csinh(z);\n}", "docstring": "//FIXME", "url": "https://github.com/rsms/llvmbox/blob/45b9a5a62f556bf8ec92899c18091109a14bbe2d/sysroots/libc/musl/src/complex/csinhl.c#L4-L7", "sha": "45b9a5a62f556bf8ec92899c18091109a14bbe2d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "tre_make_trans", "code": "static reg_errcode_t\ntre_make_trans(tre_pos_and_tags_t *p1, tre_pos_and_tags_t *p2,\n\t tre_tnfa_transition_t *transitions,\n\t int *counts, int *offs)\n{\n tre_pos_and_tags_t *orig_p2 = p2;\n tre_tnfa_transition_t *trans;\n int i, j, k, l, dup, prev_p2_pos;\n\n if (transitions != NULL)\n while (p1->position >= 0)\n {\n\tp2 = orig_p2;\n\tprev_p2_pos = -1;\n\twhile (p2->position >= 0)\n\t {\n\t /* Optimization: if this position was already handled, skip it. */\n\t if (p2->position == prev_p2_pos)\n\t {\n\t\tp2++;\n\t\tcontinue;\n\t }\n\t prev_p2_pos = p2->position;\n\t /* Set `trans' to point to the next unused transition from\n\t position `p1->position'. */\n\t trans = transitions + offs[p1->position];\n\t while (trans->state != NULL)\n\t {\n#if 0\n\t\t/* If we find a previous transition from `p1->position' to\n\t\t `p2->position', it is overwritten. This can happen only\n\t\t if there are nested loops in the regexp, like in \"((a)*)*\".\n\t\t In POSIX.2 repetition using the outer loop is always\n\t\t preferred over using the inner loop.\t Therefore the\n\t\t transition for the inner loop is useless and can be thrown\n\t\t away. 
*/\n\t\t/* XXX - The same position is used for all nodes in a bracket\n\t\t expression, so this optimization cannot be used (it will\n\t\t break bracket expressions) unless I figure out a way to\n\t\t detect it here. */\n\t\tif (trans->state_id == p2->position)\n\t\t {\n\t\t break;\n\t\t }\n#endif\n\t\ttrans++;\n\t }\n\n\t if (trans->state == NULL)\n\t (trans + 1)->state = NULL;\n\t /* Use the character ranges, assertions, etc. from `p1' for\n\t the transition from `p1' to `p2'. */\n\t trans->code_min = p1->code_min;\n\t trans->code_max = p1->code_max;\n\t trans->state = transitions + offs[p2->position];\n\t trans->state_id = p2->position;\n\t trans->assertions = p1->assertions | p2->assertions\n\t | (p1->class ? ASSERT_CHAR_CLASS : 0)\n\t | (p1->neg_classes != NULL ? ASSERT_CHAR_CLASS_NEG : 0);\n\t if (p1->backref >= 0)\n\t {\n\t\tassert((trans->assertions & ASSERT_CHAR_CLASS) == 0);\n\t\tassert(p2->backref < 0);\n\t\ttrans->u.backref = p1->backref;\n\t\ttrans->assertions |= ASSERT_BACKREF;\n\t }\n\t else\n\t trans->u.class = p1->class;\n\t if (p1->neg_classes != NULL)\n\t {\n\t\tfor (i = 0; p1->neg_classes[i] != (tre_ctype_t)0; i++);\n\t\ttrans->neg_classes =\n\t\t xmalloc(sizeof(*trans->neg_classes) * (i + 1));\n\t\tif (trans->neg_classes == NULL)\n\t\t return REG_ESPACE;\n\t\tfor (i = 0; p1->neg_classes[i] != (tre_ctype_t)0; i++)\n\t\t trans->neg_classes[i] = p1->neg_classes[i];\n\t\ttrans->neg_classes[i] = (tre_ctype_t)0;\n\t }\n\t else\n\t trans->neg_classes = NULL;\n\n\t /* Find out how many tags this transition has. */\n\t i = 0;\n\t if (p1->tags != NULL)\n\t while(p1->tags[i] >= 0)\n\t\ti++;\n\t j = 0;\n\t if (p2->tags != NULL)\n\t while(p2->tags[j] >= 0)\n\t\tj++;\n\n\t /* If we are overwriting a transition, free the old tag array. */\n\t if (trans->tags != NULL)\n\t xfree(trans->tags);\n\t trans->tags = NULL;\n\n\t /* If there were any tags, allocate an array and fill it. */\n\t if (i + j > 0)\n\t {\n\t\ttrans->tags = xmalloc(sizeof(*trans->tags) * (i + j + 1));\n\t\tif (!trans->tags)\n\t\t return REG_ESPACE;\n\t\ti = 0;\n\t\tif (p1->tags != NULL)\n\t\t while(p1->tags[i] >= 0)\n\t\t {\n\t\t trans->tags[i] = p1->tags[i];\n\t\t i++;\n\t\t }\n\t\tl = i;\n\t\tj = 0;\n\t\tif (p2->tags != NULL)\n\t\t while (p2->tags[j] >= 0)\n\t\t {\n\t\t /* Don't add duplicates. */\n\t\t dup = 0;\n\t\t for (k = 0; k < i; k++)\n\t\t\tif (trans->tags[k] == p2->tags[j])\n\t\t\t {\n\t\t\t dup = 1;\n\t\t\t break;\n\t\t\t }\n\t\t if (!dup)\n\t\t\ttrans->tags[l++] = p2->tags[j];\n\t\t j++;\n\t\t }\n\t\ttrans->tags[l] = -1;\n\t }\n\n\t p2++;\n\t }\n\tp1++;\n }\n else\n /* Compute a maximum limit for the number of transitions leaving\n from each state. */\n while (p1->position >= 0)\n {\n\tp2 = orig_p2;\n\twhile (p2->position >= 0)\n\t {\n\t counts[p1->position]++;\n\t p2++;\n\t }\n\tp1++;\n }\n return REG_OK;\n}", "docstring": "/* Adds a transition from each position in `p1' to each position in `p2'. 
*/", "url": "https://github.com/rsms/llvmbox/blob/45b9a5a62f556bf8ec92899c18091109a14bbe2d/sysroots/libc/musl/src/regex/regcomp.c#L2469-L2617", "sha": "45b9a5a62f556bf8ec92899c18091109a14bbe2d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getRandomMove", "code": "Move getRandomMove(Position* position) {\n Move moves[MAX_BRANCHING_FACTOR];\n int totalMoves = legalMoves(moves, position, position->toMove);\n int chosenMove = rand() % totalMoves;\n return moves[chosenMove];\n}", "docstring": "/* _WIN32 */", "url": "https://github.com/Kaliroot10/Flipper-Zero-RogueMaster-Firmware/blob/f8b6725eac77eb4c833293d2c3be0b89a8a1678f/applications/plugins/.chess/fast_chess.c#L2806-L2811", "sha": "f8b6725eac77eb4c833293d2c3be0b89a8a1678f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "rfalNfcDepPSL", "code": "ReturnCode rfalNfcDepPSL(uint8_t BRS, uint8_t FSL) {\n ReturnCode ret;\n uint16_t rxLen;\n uint8_t msgIt;\n uint8_t txBuf[NFCIP_PSLREQ_LEN + NFCIP_PSLPAY_LEN];\n uint8_t rxBuf[NFCIP_PSLRES_LEN];\n\n msgIt = NFCIP_PSLREQ_LEN;\n\n txBuf[msgIt++] = BRS;\n txBuf[msgIt++] = FSL;\n\n /*******************************************************************************/\n /* Send PSL REQ and wait for response */\n /*******************************************************************************/\n EXIT_ON_ERR(\n ret,\n nfcipTxRx(\n NFCIP_CMD_PSL_REQ,\n txBuf,\n nfcipRWTActivation(),\n &txBuf[NFCIP_PSLREQ_LEN],\n (msgIt - NFCIP_PSLREQ_LEN),\n rxBuf,\n NFCIP_PSLRES_LEN,\n &rxLen));\n\n /*******************************************************************************/\n /* PSL sent, check response */\n /*******************************************************************************/\n msgIt = 0;\n rxLen = (uint16_t)(rxBuf[msgIt++]); /* use LEN byte */\n\n if(rxLen < NFCIP_PSLRES_LEN) /* Checking length: LEN + RLS_RES */\n {\n return ERR_PROTO;\n }\n\n if(rxBuf[msgIt++] != NFCIP_RES) /* Checking if is a response */\n {\n return ERR_PROTO;\n }\n\n if(rxBuf[msgIt++] != (uint8_t)NFCIP_CMD_PSL_RES) /* Checking if is a PSL RES */\n {\n return ERR_PROTO;\n }\n\n if(rxBuf[msgIt++] != gNfcip.cfg.did) /* Checking DID */\n {\n return ERR_PROTO;\n }\n\n return ERR_NONE;\n}", "docstring": "/*******************************************************************************/", "url": "https://github.com/Kaliroot10/Flipper-Zero-RogueMaster-Firmware/blob/f8b6725eac77eb4c833293d2c3be0b89a8a1678f/lib/ST25RFAL002/source/rfal_nfcDep.c#L2078-L2132", "sha": "f8b6725eac77eb4c833293d2c3be0b89a8a1678f", "code/function": "ReturnCode rfalNfcDepPSL(uint8_t BRS, uint8_t FSL) {\n ReturnCode ret;\n uint16_t rxLen;\n uint8_t msgIt;\n uint8_t txBuf[NFCIP_PSLREQ_LEN + NFCIP_PSLPAY_LEN];\n uint8_t rxBuf[NFCIP_PSLRES_LEN];\n\n msgIt = NFCIP_PSLREQ_LEN;\n\n txBuf[msgIt++] = BRS;\n txBuf[msgIt++] = FSL;\n\n \n /* Send PSL REQ and wait for response */\n \n EXIT_ON_ERR(\n ret,\n nfcipTxRx(\n NFCIP_CMD_PSL_REQ,\n txBuf,\n nfcipRWTActivation(),\n &txBuf[NFCIP_PSLREQ_LEN],\n (msgIt - NFCIP_PSLREQ_LEN),\n rxBuf,\n NFCIP_PSLRES_LEN,\n &rxLen));\n\n \n /* PSL sent, check response */\n \n msgIt = 0;\n rxLen = (uint16_t)(rxBuf[msgIt++]); /* use LEN byte */\n\n if(rxLen < NFCIP_PSLRES_LEN) /* Checking length: LEN + RLS_RES */\n {\n return ERR_PROTO;\n }\n\n if(rxBuf[msgIt++] != NFCIP_RES) /* Checking if is a response */\n {\n return ERR_PROTO;\n }\n\n if(rxBuf[msgIt++] != (uint8_t)NFCIP_CMD_PSL_RES) /* Checking if is a PSL RES */\n {\n return ERR_PROTO;\n }\n\n if(rxBuf[msgIt++] != 
gNfcip.cfg.did) /* Checking DID */\n {\n return ERR_PROTO;\n }\n\n return ERR_NONE;\n}"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "rfalST25xVPollerGetRandomNumber", "code": "ReturnCode rfalST25xVPollerGetRandomNumber(\n uint8_t flags,\n const uint8_t* uid,\n uint8_t* rxBuf,\n uint16_t rxBufLen,\n uint16_t* rcvLen) {\n rfalFieldOff();\n platformDelay(RFAL_ST25TV02K_TRF_OFF);\n rfalNfcvPollerInitialize();\n rfalFieldOnAndStartGT();\n platformDelay(RFAL_ST25TV02K_TBOOT_RF);\n return rfalNfcvPollerTransceiveReq(\n RFAL_NFCV_CMD_GET_RANDOM_NUMBER,\n flags,\n RFAL_NFCV_ST_IC_MFG_CODE,\n uid,\n NULL,\n 0U,\n rxBuf,\n rxBufLen,\n rcvLen);\n}", "docstring": "/*******************************************************************************/", "url": "https://github.com/Kaliroot10/Flipper-Zero-RogueMaster-Firmware/blob/f8b6725eac77eb4c833293d2c3be0b89a8a1678f/lib/ST25RFAL002/source/rfal_st25xv.c#L733-L754", "sha": "f8b6725eac77eb4c833293d2c3be0b89a8a1678f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "rfalTransceiveRunBlockingTx", "code": "static ReturnCode rfalTransceiveRunBlockingTx(void) {\n ReturnCode ret;\n\n do {\n rfalWorker();\n ret = rfalGetTransceiveStatus();\n } while(rfalIsTransceiveInTx() && (ret == ERR_BUSY));\n\n if(rfalIsTransceiveInRx()) {\n return ERR_NONE;\n }\n\n return ret;\n}", "docstring": "/*******************************************************************************/", "url": "https://github.com/Kaliroot10/Flipper-Zero-RogueMaster-Firmware/blob/f8b6725eac77eb4c833293d2c3be0b89a8a1678f/lib/ST25RFAL002/source/st25r3916/rfal_rfst25r3916.c#L1560-L1573", "sha": "f8b6725eac77eb4c833293d2c3be0b89a8a1678f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "u8g2_SetupBuffer", "code": "void u8g2_SetupBuffer(\n u8g2_t* u8g2,\n uint8_t* buf,\n uint8_t tile_buf_height,\n u8g2_draw_ll_hvline_cb ll_hvline_cb,\n const u8g2_cb_t* u8g2_cb) {\n u8g2->font = NULL;\n //u8g2->kerning = NULL;\n //u8g2->get_kerning_cb = u8g2_GetNullKerning;\n\n //u8g2->ll_hvline = u8g2_ll_hvline_vertical_top_lsb;\n u8g2->ll_hvline = ll_hvline_cb;\n\n u8g2->tile_buf_ptr = buf;\n u8g2->tile_buf_height = tile_buf_height;\n\n u8g2->tile_curr_row = 0;\n\n u8g2->font_decode.is_transparent = 0; /* issue 443 */\n u8g2->bitmap_transparency = 0;\n\n u8g2->draw_color = 1;\n u8g2->is_auto_page_clear = 1;\n\n u8g2->cb = u8g2_cb;\n u8g2->cb->update_dimension(u8g2);\n#ifdef U8G2_WITH_CLIP_WINDOW_SUPPORT\n u8g2_SetMaxClipWindow(u8g2); /* assign a clip window and call the update() procedure */\n#else\n u8g2->cb->update_page_win(u8g2);\n#endif\n\n u8g2_SetFontPosBaseline(u8g2); /* issue 195 */\n\n#ifdef U8G2_WITH_FONT_ROTATION\n u8g2->font_decode.dir = 0;\n#endif\n}", "docstring": "/*============================================*/\n/*\n This procedure is called after setting up the display (u8x8 structure).\n --> This is the central init procedure for u8g2 object\n*/", "url": "https://github.com/Kaliroot10/Flipper-Zero-RogueMaster-Firmware/blob/f8b6725eac77eb4c833293d2c3be0b89a8a1678f/lib/u8g2/u8g2_setup.c#L72-L109", "sha": "f8b6725eac77eb4c833293d2c3be0b89a8a1678f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "iscleared", "code": "static int iscleared (global_State *g, const GCObject *o) {\n if (o == NULL) return 0; /* non-collectable value */\n else if (novariant(o->tt) == LUA_TSTRING) {\n markobject(g, o); /* strings are 'values', so are never weak */\n return 0;\n }\n else 
return iswhite(o);\n}", "docstring": "/*\n** tells whether a key or value can be cleared from a weak\n** table. Non-collectable objects are never removed from weak\n** tables. Strings behave as 'values', so are never removed too. for\n** other objects: if really collected, cannot keep them; for objects\n** being finalized, keep them in keys, but not in values\n*/", "url": "https://github.com/paxo-phone/PaxOS-8/blob/ffa7fcb96f42632a2d6d61e7137d53bf809e673a/src/lib/lua/lgc.c#L185-L192", "sha": "ffa7fcb96f42632a2d6d61e7137d53bf809e673a"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "traverseLclosure", "code": "static int traverseLclosure (global_State *g, LClosure *cl) {\n int i;\n markobjectN(g, cl->p); /* mark its prototype */\n for (i = 0; i < cl->nupvalues; i++) { /* visit its upvalues */\n UpVal *uv = cl->upvals[i];\n markobjectN(g, uv); /* mark upvalue */\n }\n return 1 + cl->nupvalues;\n}", "docstring": "/*\n** Traverse a Lua closure, marking its prototype and its upvalues.\n** (Both can be NULL while closure is being created.)\n*/", "url": "https://github.com/paxo-phone/PaxOS-8/blob/ffa7fcb96f42632a2d6d61e7137d53bf809e673a/src/lib/lua/lgc.c#L604-L612", "sha": "ffa7fcb96f42632a2d6d61e7137d53bf809e673a"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "report", "code": "static int report (lua_State *L, int status) {\n if (status != LUA_OK) {\n const char *msg = lua_tostring(L, -1);\n l_message(progname, msg);\n lua_pop(L, 1); /* remove message */\n }\n return status;\n}", "docstring": "/*\n** Check whether 'status' is not OK and, if so, prints the error\n** message on the top of the stack. It assumes that the error object\n** is a string, as it was either generated by Lua or by 'msghandler'.\n*/", "url": "https://github.com/paxo-phone/PaxOS-8/blob/ffa7fcb96f42632a2d6d61e7137d53bf809e673a/src/lib/lua/lua.c#L121-L128", "sha": "ffa7fcb96f42632a2d6d61e7137d53bf809e673a"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "vorbis_analysis_wrote", "code": "int vorbis_analysis_wrote(vorbis_dsp_state *v, int vals){\n vorbis_info *vi=v->vi;\n codec_setup_info *ci=vi->codec_setup;\n\n if(vals<=0){\n int order=32;\n int i;\n float *lpc=alloca(order*sizeof(*lpc));\n\n /* if it wasn't done earlier (very short sample) */\n if(!v->preextrapolate)\n _preextrapolate_helper(v);\n\n /* We're encoding the end of the stream. Just make sure we have\n [at least] a few full blocks of zeroes at the end. */\n /* actually, we don't want zeroes; that could drop a large\n amplitude off a cliff, creating spread spectrum noise that will\n suck to encode. Extrapolate for the sake of cleanliness. */\n\n vorbis_analysis_buffer(v,ci->blocksizes[1]*3);\n v->eofflag=v->pcm_current;\n v->pcm_current+=ci->blocksizes[1]*3;\n\n for(i=0;i<vi->channels;i++){\n if(v->eofflag>order*2){\n /* extrapolate with LPC to fill in */\n long n;\n\n /* make a predictor filter */\n n=v->eofflag;\n if(n>ci->blocksizes[1])n=ci->blocksizes[1];\n vorbis_lpc_from_data(v->pcm[i]+v->eofflag-n,lpc,n,order);\n\n /* run the predictor filter */\n vorbis_lpc_predict(lpc,v->pcm[i]+v->eofflag-order,order,\n v->pcm[i]+v->eofflag,v->pcm_current-v->eofflag);\n }else{\n /* not enough data to extrapolate (unlikely to happen due to\n guarding the overlap, but bulletproof in case that\n assumtion goes away). zeroes will do. 
*/\n memset(v->pcm[i]+v->eofflag,0,\n (v->pcm_current-v->eofflag)*sizeof(*v->pcm[i]));\n\n }\n }\n }else{\n\n if(v->pcm_current+vals>v->pcm_storage)\n return(OV_EINVAL);\n\n v->pcm_current+=vals;\n\n /* we may want to reverse extrapolate the beginning of a stream\n too... in case we're beginning on a cliff! */\n /* clumsy, but simple. It only runs once, so simple is good. */\n if(!v->preextrapolate && v->pcm_current-v->centerW>ci->blocksizes[1])\n _preextrapolate_helper(v);\n\n }\n return(0);\n}", "docstring": "/* call with val<=0 to set eof */", "url": "https://github.com/STREGAsGate/GateEngine/blob/f755456c313b7e1b8686fd6fd599c29fcc732d64/Dependencies/Vorbis/block.c#L460-L520", "sha": "f755456c313b7e1b8686fd6fd599c29fcc732d64"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "handle_gdb_write_register", "code": "static int handle_gdb_write_register(const char *buff, size_t len)\n{\n\tuint32_t reg_num_gdb, reg_num_rm;\n\tconst char *ptr, *dec;\n\tunion minibuf value;\n\n\tstatic const int gdb_to_rm[] =\n\t\t/* EAX. */ /* GS. */\n\t\t {7,6,5,4,3,2,1,0,13,15,14,12,11,10,9,8};\n\n\tptr = buff;\n\n\texpect_char('P', ptr, len);\n\treg_num_gdb = read_int(ptr, &len, &ptr, 16);\n\texpect_char('=', ptr, len);\n\tdec = decode_hex(ptr, 4);\n\n\tmemcpy(&value, dec, 4);\n\n\t/* Validate register. */\n\tif (reg_num_gdb >= 16)\n\t{\n\t\tsend_gdb_error();\n\t\treturn (-1);\n\t}\n\n\treg_num_rm = gdb_to_rm[reg_num_gdb];\n\n\t/*\n\t * Validate value: 16-bit registers should not\n\t * receive values greater than 16-bit =)\n\t */\n\tif (reg_num_rm >= 8 && value.b32 > ((1<<16)-1))\n\t{\n\t\tsend_gdb_error();\n\t\treturn (-1);\n\t}\n\n\t/* Update our 'cache'. */\n\tx86_regs.r32[reg_num_gdb] = value.b32;\n\n\t/* Send to our serial device. */\n\tsend_serial_byte(SERIAL_STATE_REG_WRITE);\n\tsend_serial_byte(reg_num_rm);\n\tsend_serial_dword(value.b32);\n\treturn (0);\n}", "docstring": "/**\n * @brief Handles the 'write register (P)' GDB command;\n *\n * Please note the the segment registers and EIP,EFLAGS\n * are 16-bit. An attempt to write a 32-bit value on them\n * will emit an error.\n *\n * Also note that the mapping from what we receive from\n * the serial device and the mapping expected by GDB\n * differs, so there is a need to a conversion.\n *\n * @param buff Buffer to be parsed.\n * @param len Buffer length.\n *\n * @return Returns 0 if the command is valid, -1 otherwise.\n */", "url": "https://github.com/Theldus/bread/blob/c4bb9efcae563481c31fa865b64e230d9888e627/gdb.c#L658-L704", "sha": "c4bb9efcae563481c31fa865b64e230d9888e627"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "xTaskCheckForTimeOut", "code": "BaseType_t xTaskCheckForTimeOut( TimeOut_t * const pxTimeOut, TickType_t * const pxTicksToWait )\n{\nBaseType_t xReturn;\n\n\tconfigASSERT( pxTimeOut );\n\tconfigASSERT( pxTicksToWait );\n\n\ttaskENTER_CRITICAL();\n\t{\n\t\t/* Minor optimisation. The tick count cannot change in this block. */\n\t\tconst TickType_t xConstTickCount = xTickCount;\n\t\tconst TickType_t xElapsedTime = xConstTickCount - pxTimeOut->xTimeOnEntering;\n\n\t\t#if( INCLUDE_xTaskAbortDelay == 1 )\n\t\t\tif( pxCurrentTCB->ucDelayAborted != ( uint8_t ) pdFALSE )\n\t\t\t{\n\t\t\t\t/* The delay was aborted, which is not the same as a time out,\n\t\t\t\tbut has the same result. 
*/\n\t\t\t\tpxCurrentTCB->ucDelayAborted = pdFALSE;\n\t\t\t\txReturn = pdTRUE;\n\t\t\t}\n\t\t\telse\n\t\t#endif\n\n\t\t#if ( INCLUDE_vTaskSuspend == 1 )\n\t\t\tif( *pxTicksToWait == portMAX_DELAY )\n\t\t\t{\n\t\t\t\t/* If INCLUDE_vTaskSuspend is set to 1 and the block time\n\t\t\t\tspecified is the maximum block time then the task should block\n\t\t\t\tindefinitely, and therefore never time out. */\n\t\t\t\txReturn = pdFALSE;\n\t\t\t}\n\t\t\telse\n\t\t#endif\n\n\t\tif( ( xNumOfOverflows != pxTimeOut->xOverflowCount ) && ( xConstTickCount >= pxTimeOut->xTimeOnEntering ) ) /*lint !e525 Indentation preferred as is to make code within pre-processor directives clearer. */\n\t\t{\n\t\t\t/* The tick count is greater than the time at which\n\t\t\tvTaskSetTimeout() was called, but has also overflowed since\n\t\t\tvTaskSetTimeOut() was called. It must have wrapped all the way\n\t\t\taround and gone past again. This passed since vTaskSetTimeout()\n\t\t\twas called. */\n\t\t\txReturn = pdTRUE;\n\t\t}\n\t\telse if( xElapsedTime < *pxTicksToWait ) /*lint !e961 Explicit casting is only redundant with some compilers, whereas others require it to prevent integer conversion errors. */\n\t\t{\n\t\t\t/* Not a genuine timeout. Adjust parameters for time remaining. */\n\t\t\t*pxTicksToWait -= xElapsedTime;\n\t\t\tvTaskInternalSetTimeOutState( pxTimeOut );\n\t\t\txReturn = pdFALSE;\n\t\t}\n\t\telse\n\t\t{\n\t\t\t*pxTicksToWait = 0;\n\t\t\txReturn = pdTRUE;\n\t\t}\n\t}\n\ttaskEXIT_CRITICAL();\n\n\treturn xReturn;\n}", "docstring": "/*-----------------------------------------------------------*/", "url": "https://github.com/MakersFunDuck/Hanshow-Auto-Updater/blob/69306b53088f38eb913f933f63fce38946124457/ATC_TLSR_Paper-/Firmware/components/freertos/tasks.c#L3169-L3229", "sha": "69306b53088f38eb913f933f63fce38946124457"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MPU_xEventGroupSync", "code": "EventBits_t MPU_xEventGroupSync( EventGroupHandle_t xEventGroup, const EventBits_t uxBitsToSet, const EventBits_t uxBitsToWaitFor, TickType_t xTicksToWait )\n{\nEventBits_t xReturn;\nBaseType_t xRunningPrivileged = xPortRaisePrivilege();\n\n\txReturn = xEventGroupSync( xEventGroup, uxBitsToSet, uxBitsToWaitFor, xTicksToWait );\n\tvPortResetPrivilege( xRunningPrivileged );\n\n\treturn xReturn;\n}", "docstring": "/*-----------------------------------------------------------*/", "url": "https://github.com/MakersFunDuck/Hanshow-Auto-Updater/blob/69306b53088f38eb913f933f63fce38946124457/ATC_TLSR_Paper-/Firmware/components/freertos/portable/Common/mpu_wrappers.c#L1094-L1103", "sha": "69306b53088f38eb913f933f63fce38946124457"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "wimlib_export_image", "code": "WIMLIBAPI int\nwimlib_export_image(WIMStruct *src_wim,\n\t\t int src_image,\n\t\t WIMStruct *dest_wim,\n\t\t const tchar *dest_name,\n\t\t const tchar *dest_description,\n\t\t int export_flags)\n{\n\tint ret;\n\tint start_src_image;\n\tint end_src_image;\n\tint orig_dest_image_count;\n\tint image;\n\tbool all_images = (src_image == WIMLIB_ALL_IMAGES);\n\n\t/* Check for sane parameters. 
*/\n\tif (export_flags & ~(WIMLIB_EXPORT_FLAG_BOOT |\n\t\t\t WIMLIB_EXPORT_FLAG_NO_NAMES |\n\t\t\t WIMLIB_EXPORT_FLAG_NO_DESCRIPTIONS |\n\t\t\t WIMLIB_EXPORT_FLAG_GIFT |\n\t\t\t WIMLIB_EXPORT_FLAG_WIMBOOT))\n\t\treturn WIMLIB_ERR_INVALID_PARAM;\n\n\tif (!src_wim || !dest_wim)\n\t\treturn WIMLIB_ERR_INVALID_PARAM;\n\n\tif (!wim_has_metadata(src_wim) || !wim_has_metadata(dest_wim))\n\t\treturn WIMLIB_ERR_METADATA_NOT_FOUND;\n\n\tif (all_images) {\n\t\t/* Multi-image export. */\n\t\tif ((!(export_flags & WIMLIB_EXPORT_FLAG_NO_NAMES) &&\n\t\t\tdest_name) ||\n\t\t (!(export_flags & WIMLIB_EXPORT_FLAG_NO_DESCRIPTIONS) &&\n\t\t\tdest_description))\n\t\t{\n\t\t\tERROR(\"Image name and description must be \"\n\t\t\t \"left NULL for multi-image export\");\n\t\t\treturn WIMLIB_ERR_INVALID_PARAM;\n\t\t}\n\t\tstart_src_image = 1;\n\t\tend_src_image = src_wim->hdr.image_count;\n\t} else {\n\t\tstart_src_image = src_image;\n\t\tend_src_image = src_image;\n\t}\n\torig_dest_image_count = dest_wim->hdr.image_count;\n\n\t/* We don't yet support having a single WIMStruct contain duplicate\n\t * 'image_metadata' structures, so we must forbid this from happening.\n\t * A duplication is possible if 'src_wim == dest_wim', if the same image\n\t * is exported to the same destination WIMStruct multiple times, or if\n\t * an image is exported in an A => B => A manner. */\n\tfor (src_image = start_src_image;\n\t src_image <= end_src_image; src_image++)\n\t{\n\t\tconst struct wim_image_metadata *src_imd =\n\t\t\t\tsrc_wim->image_metadata[src_image - 1];\n\t\tfor (int i = 0; i < dest_wim->hdr.image_count; i++)\n\t\t\tif (dest_wim->image_metadata[i] == src_imd)\n\t\t\t\treturn WIMLIB_ERR_DUPLICATE_EXPORTED_IMAGE;\n\t}\n\n\t/* Blob checksums must be known before proceeding. */\n\tret = wim_checksum_unhashed_blobs(src_wim);\n\tif (ret)\n\t\treturn ret;\n\tret = wim_checksum_unhashed_blobs(dest_wim);\n\tif (ret)\n\t\treturn ret;\n\n\t/* Enable rollbacks */\n\tfor_blob_in_table(dest_wim->blob_table, blob_set_not_exported, NULL);\n\n\t/* Forbid exports where the destination WIM already contains image(s)\n\t * with the requested name(s). However, allow multi-image exports where\n\t * there is a duplication among the source names only. */\n\tif (!(export_flags & WIMLIB_EXPORT_FLAG_NO_NAMES)) {\n\t\tfor (src_image = start_src_image;\n\t\t src_image <= end_src_image;\n\t\t src_image++)\n\t\t{\n\t\t\tconst tchar *name = dest_name ? dest_name :\n\t\t\t\twimlib_get_image_name(src_wim, src_image);\n\n\t\t\tif (wimlib_image_name_in_use(dest_wim, name)) {\n\t\t\t\tERROR(\"There is already an image named \\\"%\"TS\"\\\" \"\n\t\t\t\t \"in the destination WIM\", name);\n\t\t\t\tret = WIMLIB_ERR_IMAGE_NAME_COLLISION;\n\t\t\t\tgoto out_rollback;\n\t\t\t}\n\t\t}\n\t}\n\n\t/* Export each requested image. */\n\tfor (src_image = start_src_image;\n\t src_image <= end_src_image;\n\t src_image++)\n\t{\n\t\tconst tchar *next_dest_name, *next_dest_description;\n\t\tstruct wim_image_metadata *src_imd;\n\t\tstruct wim_inode *inode;\n\n\t\t/* Determine destination image name and description. 
*/\n\n\t\tif (export_flags & WIMLIB_EXPORT_FLAG_NO_NAMES)\n\t\t\tnext_dest_name = NULL;\n\t\telse if (dest_name)\n\t\t\tnext_dest_name = dest_name;\n\t\telse\n\t\t\tnext_dest_name = wimlib_get_image_name(src_wim, src_image);\n\n\t\tif (export_flags & WIMLIB_EXPORT_FLAG_NO_DESCRIPTIONS)\n\t\t\tnext_dest_description = NULL;\n\t\telse if (dest_description)\n\t\t\tnext_dest_description = dest_description;\n\t\telse\n\t\t\tnext_dest_description = wimlib_get_image_description(src_wim, src_image);\n\n\t\t/* Load metadata for source image into memory. */\n\t\tret = select_wim_image(src_wim, src_image);\n\t\tif (ret)\n\t\t\tgoto out_rollback;\n\n\t\tsrc_imd = wim_get_current_image_metadata(src_wim);\n\n\t\t/* Iterate through inodes in the source image and export their\n\t\t * blobs into the destination WIM. */\n\t\timage_for_each_inode(inode, src_imd) {\n\t\t\tret = inode_export_blobs(inode,\n\t\t\t\t\t\t src_wim->blob_table,\n\t\t\t\t\t\t dest_wim->blob_table,\n\t\t\t\t\t\t export_flags & WIMLIB_EXPORT_FLAG_GIFT);\n\t\t\tif (ret)\n\t\t\t\tgoto out_rollback;\n\t\t}\n\n\t\t/* Export XML information into the destination WIM. */\n\t\tret = xml_export_image(src_wim->xml_info, src_image,\n\t\t\t\t dest_wim->xml_info, next_dest_name,\n\t\t\t\t next_dest_description,\n\t\t\t\t export_flags & WIMLIB_EXPORT_FLAG_WIMBOOT);\n\t\tif (ret)\n\t\t\tgoto out_rollback;\n\n\t\t/* Reference the source image metadata from the destination WIM.\n\t\t */\n\t\tret = append_image_metadata(dest_wim, src_imd);\n\t\tif (ret)\n\t\t\tgoto out_rollback;\n\t\tsrc_imd->refcnt++;\n\t}\n\n\t/* Image export complete. Finish by setting any needed special metadata\n\t * on the destination WIM. */\n\n\tif (src_wim->hdr.flags & WIM_HDR_FLAG_RP_FIX)\n\t\tdest_wim->hdr.flags |= WIM_HDR_FLAG_RP_FIX;\n\n\tfor (src_image = start_src_image;\n\t src_image <= end_src_image;\n\t src_image++)\n\t{\n\t\tint dst_image = orig_dest_image_count + 1 +\n\t\t\t\t(src_image - start_src_image);\n\n\t\tif ((export_flags & WIMLIB_EXPORT_FLAG_BOOT) &&\n\t\t (!all_images || src_image == src_wim->hdr.boot_idx))\n\t\t\tdest_wim->hdr.boot_idx = dst_image;\n\t}\n\n\treturn 0;\n\nout_rollback:\n\twhile ((image = xml_get_image_count(dest_wim->xml_info))\n\t > orig_dest_image_count)\n\t{\n\t\txml_delete_image(dest_wim->xml_info, image);\n\t}\n\twhile (dest_wim->hdr.image_count > orig_dest_image_count)\n\t{\n\t\tput_image_metadata(dest_wim->image_metadata[\n\t\t\t\t\t--dest_wim->hdr.image_count]);\n\t}\n\tfor_blob_in_table(dest_wim->blob_table, blob_rollback_export,\n\t\t\t dest_wim->blob_table);\n\treturn ret;\n}", "docstring": "/* API function documented in wimlib.h */", "url": "https://github.com/TechUnRestricted/WinDiskWriter/blob/86a003b84ecf0e3e4edd315b202eca3f01c6c0cc/WinDiskWriter/libs/wimlib/src/export_image.c#L108-L295", "sha": "86a003b84ecf0e3e4edd315b202eca3f01c6c0cc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "lzms_remove_bits", "code": "static forceinline void\nlzms_remove_bits(struct lzms_input_bitstream *is, unsigned num_bits)\n{\n\tis->bitbuf <<= num_bits;\n\tis->bitsleft -= num_bits;\n}", "docstring": "/* Remove @num_bits bits from the bitbuffer variable. 
*/", "url": "https://github.com/TechUnRestricted/WinDiskWriter/blob/86a003b84ecf0e3e4edd315b202eca3f01c6c0cc/WinDiskWriter/libs/wimlib/src/lzms_decompress.c#L418-L423", "sha": "86a003b84ecf0e3e4edd315b202eca3f01c6c0cc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "extract_encrypted_file", "code": "static int\nextract_encrypted_file(const struct wim_dentry *dentry,\n\t\t struct win32_apply_ctx *ctx)\n{\n\tvoid *rawctx;\n\tDWORD err;\n\tULONG flags;\n\tbool retried;\n\n\t/* Temporarily build a Win32 path for OpenEncryptedFileRaw() */\n\tbuild_win32_extraction_path(dentry, ctx);\n\n\tflags = CREATE_FOR_IMPORT | OVERWRITE_HIDDEN;\n\tif (dentry->d_inode->i_attributes & FILE_ATTRIBUTE_DIRECTORY)\n\t\tflags |= CREATE_FOR_DIR;\n\n\tretried = false;\nretry:\n\terr = OpenEncryptedFileRaw(ctx->pathbuf.Buffer, flags, &rawctx);\n\tif (err == ERROR_SHARING_VIOLATION && !retried) {\n\t\t/* This can be caused by the handle we have open to the target\n\t\t * directory. Try closing it temporarily. */\n\t\tclose_target_directory(ctx);\n\t\tretried = true;\n\t\tgoto retry;\n\t}\n\n\t/* Restore the NT namespace path */\n\tbuild_extraction_path(dentry, ctx);\n\n\tif (err != ERROR_SUCCESS) {\n\t\twin32_error(err, L\"Can't open \\\"%ls\\\" for encrypted import\",\n\t\t\t current_path(ctx));\n\t\treturn WIMLIB_ERR_OPEN;\n\t}\n\n\tctx->encrypted_offset = 0;\n\n\terr = WriteEncryptedFileRaw(import_encrypted_data, ctx, rawctx);\n\n\tCloseEncryptedFileRaw(rawctx);\n\n\tif (err != ERROR_SUCCESS) {\n\t\twin32_error(err, L\"Can't import encrypted file \\\"%ls\\\"\",\n\t\t\t current_path(ctx));\n\t\treturn WIMLIB_ERR_WRITE;\n\t}\n\n\treturn 0;\n}", "docstring": "/*\n * Write the raw encrypted data to the already-created file (or directory)\n * corresponding to @dentry.\n *\n * The raw encrypted data is provided in ctx->data_buffer, and its size is\n * ctx->encrypted_size.\n *\n * This function may close the target directory, in which case the caller needs\n * to re-open it if needed.\n */", "url": "https://github.com/TechUnRestricted/WinDiskWriter/blob/86a003b84ecf0e3e4edd315b202eca3f01c6c0cc/WinDiskWriter/libs/wimlib/src/win32_apply.c#L2264-L2313", "sha": "86a003b84ecf0e3e4edd315b202eca3f01c6c0cc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "generate_random_sid", "code": "static size_t\ngenerate_random_sid(wimlib_SID *sid, struct generation_context *ctx)\n{\n\tu32 r = rand32();\n\n\tsid->revision = 1;\n\n\tif (r & 1) {\n\t\t/* Common SID */\n\t\tr = (r >> 1) % ARRAY_LEN(common_sids);\n\n\t\tsid->sub_authority_count = common_sids[r].num_subauthorities;\n\t\tfor (int i = 0; i < 6; i++) {\n\t\t\tsid->identifier_authority[i] =\n\t\t\t\tcommon_sids[r].identifier_authority >> (40 - i * 8);\n\t\t}\n\t\tfor (int i = 0; i < common_sids[r].num_subauthorities; i++)\n\t\t\tsid->sub_authority[i] = cpu_to_le32(common_sids[r].subauthorities[i]);\n\t} else {\n\t\t/* Random SID */\n\n\t\tsid->sub_authority_count = 1 + ((r >> 1) % 15);\n\n\t\tfor (int i = 0; i < 6; i++)\n\t\t\tsid->identifier_authority[i] = rand8();\n\n\t\tfor (int i = 0; i < sid->sub_authority_count; i++)\n\t\t\tsid->sub_authority[i] = cpu_to_le32(rand32());\n\t}\n\treturn (u8 *)&sid->sub_authority[sid->sub_authority_count] - (u8 *)sid;\n}", "docstring": "/* Generate a SID and return its size in bytes. 
*/", "url": "https://github.com/TechUnRestricted/WinDiskWriter/blob/86a003b84ecf0e3e4edd315b202eca3f01c6c0cc/WinDiskWriter/libs/wimlib/src/test_support/test_support.c#L270-L300", "sha": "86a003b84ecf0e3e4edd315b202eca3f01c6c0cc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "show_parsed", "code": "static void show_parsed(compile_block *cb)\n{\nuint32_t *pptr = cb->parsed_pattern;\n\nfor (;;)\n {\n int max, min;\n PCRE2_SIZE offset;\n uint32_t i;\n uint32_t length;\n uint32_t meta_arg = META_DATA(*pptr);\n\n fprintf(stderr, \"+++ %02d %.8x \", (int)(pptr - cb->parsed_pattern), *pptr);\n\n if (*pptr < META_END)\n {\n if (*pptr > 32 && *pptr < 128) fprintf(stderr, \"%c\", *pptr);\n pptr++;\n }\n\n else switch (META_CODE(*pptr++))\n {\n default:\n fprintf(stderr, \"**** OOPS - unknown META value - giving up ****\\n\");\n return;\n\n case META_END:\n fprintf(stderr, \"META_END\\n\");\n return;\n\n case META_CAPTURE:\n fprintf(stderr, \"META_CAPTURE %d\", meta_arg);\n break;\n\n case META_RECURSE:\n GETOFFSET(offset, pptr);\n fprintf(stderr, \"META_RECURSE %d %zd\", meta_arg, offset);\n break;\n\n case META_BACKREF:\n if (meta_arg < 10)\n offset = cb->small_ref_offset[meta_arg];\n else\n GETOFFSET(offset, pptr);\n fprintf(stderr, \"META_BACKREF %d %zd\", meta_arg, offset);\n break;\n\n case META_ESCAPE:\n if (meta_arg == ESC_P || meta_arg == ESC_p)\n {\n uint32_t ptype = *pptr >> 16;\n uint32_t pvalue = *pptr++ & 0xffff;\n fprintf(stderr, \"META \\\\%c %d %d\", (meta_arg == ESC_P)? 'P':'p',\n ptype, pvalue);\n }\n else\n {\n uint32_t cc;\n /* There's just one escape we might have here that isn't negated in the\n escapes table. */\n if (meta_arg == ESC_g) cc = CHAR_g;\n else for (cc = ESCAPES_FIRST; cc <= ESCAPES_LAST; cc++)\n {\n if (meta_arg == (uint32_t)(-escapes[cc - ESCAPES_FIRST])) break;\n }\n if (cc > ESCAPES_LAST) cc = CHAR_QUESTION_MARK;\n fprintf(stderr, \"META \\\\%c\", cc);\n }\n break;\n\n case META_MINMAX:\n min = *pptr++;\n max = *pptr++;\n if (max != REPEAT_UNLIMITED)\n fprintf(stderr, \"META {%d,%d}\", min, max);\n else\n fprintf(stderr, \"META {%d,}\", min);\n break;\n\n case META_MINMAX_QUERY:\n min = *pptr++;\n max = *pptr++;\n if (max != REPEAT_UNLIMITED)\n fprintf(stderr, \"META {%d,%d}?\", min, max);\n else\n fprintf(stderr, \"META {%d,}?\", min);\n break;\n\n case META_MINMAX_PLUS:\n min = *pptr++;\n max = *pptr++;\n if (max != REPEAT_UNLIMITED)\n fprintf(stderr, \"META {%d,%d}+\", min, max);\n else\n fprintf(stderr, \"META {%d,}+\", min);\n break;\n\n case META_BIGVALUE: fprintf(stderr, \"META_BIGVALUE %.8x\", *pptr++); break;\n case META_CIRCUMFLEX: fprintf(stderr, \"META_CIRCUMFLEX\"); break;\n case META_COND_ASSERT: fprintf(stderr, \"META_COND_ASSERT\"); break;\n case META_DOLLAR: fprintf(stderr, \"META_DOLLAR\"); break;\n case META_DOT: fprintf(stderr, \"META_DOT\"); break;\n case META_ASTERISK: fprintf(stderr, \"META *\"); break;\n case META_ASTERISK_QUERY: fprintf(stderr, \"META *?\"); break;\n case META_ASTERISK_PLUS: fprintf(stderr, \"META *+\"); break;\n case META_PLUS: fprintf(stderr, \"META +\"); break;\n case META_PLUS_QUERY: fprintf(stderr, \"META +?\"); break;\n case META_PLUS_PLUS: fprintf(stderr, \"META ++\"); break;\n case META_QUERY: fprintf(stderr, \"META ?\"); break;\n case META_QUERY_QUERY: fprintf(stderr, \"META ??\"); break;\n case META_QUERY_PLUS: fprintf(stderr, \"META ?+\"); break;\n\n case META_ATOMIC: fprintf(stderr, \"META (?>\"); break;\n case META_NOCAPTURE: fprintf(stderr, \"META (?:\"); break;\n case 
META_LOOKAHEAD: fprintf(stderr, \"META (?=\"); break;\n case META_LOOKAHEADNOT: fprintf(stderr, \"META (?!\"); break;\n case META_LOOKAHEAD_NA: fprintf(stderr, \"META (*napla:\"); break;\n case META_SCRIPT_RUN: fprintf(stderr, \"META (*sr:\"); break;\n case META_KET: fprintf(stderr, \"META )\"); break;\n case META_ALT: fprintf(stderr, \"META | %d\", meta_arg); break;\n\n case META_CLASS: fprintf(stderr, \"META [\"); break;\n case META_CLASS_NOT: fprintf(stderr, \"META [^\"); break;\n case META_CLASS_END: fprintf(stderr, \"META ]\"); break;\n case META_CLASS_EMPTY: fprintf(stderr, \"META []\"); break;\n case META_CLASS_EMPTY_NOT: fprintf(stderr, \"META [^]\"); break;\n\n case META_RANGE_LITERAL: fprintf(stderr, \"META - (literal)\"); break;\n case META_RANGE_ESCAPED: fprintf(stderr, \"META - (escaped)\"); break;\n\n case META_POSIX: fprintf(stderr, \"META_POSIX %d\", *pptr++); break;\n case META_POSIX_NEG: fprintf(stderr, \"META_POSIX_NEG %d\", *pptr++); break;\n\n case META_ACCEPT: fprintf(stderr, \"META (*ACCEPT)\"); break;\n case META_FAIL: fprintf(stderr, \"META (*FAIL)\"); break;\n case META_COMMIT: fprintf(stderr, \"META (*COMMIT)\"); break;\n case META_PRUNE: fprintf(stderr, \"META (*PRUNE)\"); break;\n case META_SKIP: fprintf(stderr, \"META (*SKIP)\"); break;\n case META_THEN: fprintf(stderr, \"META (*THEN)\"); break;\n\n case META_OPTIONS: fprintf(stderr, \"META_OPTIONS 0x%02x\", *pptr++); break;\n\n case META_LOOKBEHIND:\n fprintf(stderr, \"META (?<= %d offset=\", meta_arg);\n GETOFFSET(offset, pptr);\n fprintf(stderr, \"%zd\", offset);\n break;\n\n case META_LOOKBEHIND_NA:\n fprintf(stderr, \"META (*naplb: %d offset=\", meta_arg);\n GETOFFSET(offset, pptr);\n fprintf(stderr, \"%zd\", offset);\n break;\n\n case META_LOOKBEHINDNOT:\n fprintf(stderr, \"META (?=\");\n fprintf(stderr, \"%d.\", *pptr++);\n fprintf(stderr, \"%d)\", *pptr++);\n break;\n\n case META_COND_NAME:\n fprintf(stderr, \"META (?() length=%d offset=\", *pptr++);\n GETOFFSET(offset, pptr);\n fprintf(stderr, \"%zd\", offset);\n break;\n\n case META_COND_RNAME:\n fprintf(stderr, \"META (?(R&name) length=%d offset=\", *pptr++);\n GETOFFSET(offset, pptr);\n fprintf(stderr, \"%zd\", offset);\n break;\n\n /* This is kept as a name, because it might be. */\n\n case META_COND_RNUMBER:\n fprintf(stderr, \"META (?(Rnumber) length=%d offset=\", *pptr++);\n GETOFFSET(offset, pptr);\n fprintf(stderr, \"%zd\", offset);\n break;\n\n case META_MARK:\n fprintf(stderr, \"META (*MARK:\");\n goto SHOWARG;\n\n case META_COMMIT_ARG:\n fprintf(stderr, \"META (*COMMIT:\");\n goto SHOWARG;\n\n case META_PRUNE_ARG:\n fprintf(stderr, \"META (*PRUNE:\");\n goto SHOWARG;\n\n case META_SKIP_ARG:\n fprintf(stderr, \"META (*SKIP:\");\n goto SHOWARG;\n\n case META_THEN_ARG:\n fprintf(stderr, \"META (*THEN:\");\n SHOWARG:\n length = *pptr++;\n for (i = 0; i < length; i++)\n {\n uint32_t cc = *pptr++;\n if (cc > 32 && cc < 128) fprintf(stderr, \"%c\", cc);\n else fprintf(stderr, \"\\\\x{%x}\", cc);\n }\n fprintf(stderr, \") length=%u\", length);\n break;\n }\n fprintf(stderr, \"\\n\");\n }\nreturn;\n}", "docstring": "/*************************************************\n* Show the parsed pattern for debugging *\n*************************************************/\n/* For debugging the pre-scan, this code, which outputs the parsed data vector,\ncan be enabled. 
*/", "url": "https://github.com/BurntSushi/rebar/blob/19aa8e8e3bd3a4bc0ef6e07774d900e5f4840fad/engines/pcre2/upstream/src/pcre2_compile.c#L922-L1181", "sha": "19aa8e8e3bd3a4bc0ef6e07774d900e5f4840fad"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "convert_glob_char_in_class", "code": "static BOOL\nconvert_glob_char_in_class(int class_index, PCRE2_UCHAR c)\n{\nswitch (class_index)\n {\n case 1: return isalnum(c);\n case 2: return isalpha(c);\n case 3: return 1;\n case 4: return c == CHAR_HT || c == CHAR_SPACE;\n case 5: return iscntrl(c);\n case 6: return isdigit(c);\n case 7: return isgraph(c);\n case 8: return islower(c);\n case 9: return isprint(c);\n case 10: return ispunct(c);\n case 11: return isspace(c);\n case 12: return isupper(c);\n case 13: return isalnum(c) || c == CHAR_UNDERSCORE;\n default: return isxdigit(c);\n }\n}", "docstring": "/* Checks whether the character is in the class.\n\nArguments:\n class_index class index\n c character\n\nReturns: !0 => character is found in the class\n 0 => otherwise\n*/", "url": "https://github.com/BurntSushi/rebar/blob/19aa8e8e3bd3a4bc0ef6e07774d900e5f4840fad/engines/pcre2/upstream/src/pcre2_convert.c#L540-L560", "sha": "19aa8e8e3bd3a4bc0ef6e07774d900e5f4840fad"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "arm_cmplx_mult_real_f32", "code": "void arm_cmplx_mult_real_f32(\n float32_t * pSrcCmplx,\n float32_t * pSrcReal,\n float32_t * pCmplxDst,\n uint32_t numSamples)\n{\n float32_t in; /* Temporary variable to store input value */\n uint32_t blkCnt; /* loop counters */\n\n#if defined (ARM_MATH_DSP)\n\n /* Run the below code for Cortex-M4 and Cortex-M3 */\n float32_t inA1, inA2, inA3, inA4; /* Temporary variables to hold input data */\n float32_t inA5, inA6, inA7, inA8; /* Temporary variables to hold input data */\n float32_t inB1, inB2, inB3, inB4; /* Temporary variables to hold input data */\n float32_t out1, out2, out3, out4; /* Temporary variables to hold output data */\n float32_t out5, out6, out7, out8; /* Temporary variables to hold output data */\n\n /* loop Unrolling */\n blkCnt = numSamples >> 2U;\n\n /* First part of the processing with loop unrolling. Compute 4 outputs at a time.\n ** a second loop below computes the remaining 1 to 3 samples. */\n while (blkCnt > 0U)\n {\n /* C[2 * i] = A[2 * i] * B[i]. */\n /* C[2 * i + 1] = A[2 * i + 1] * B[i]. 
*/\n /* read input from complex input buffer */\n inA1 = pSrcCmplx[0];\n inA2 = pSrcCmplx[1];\n /* read input from real input buffer */\n inB1 = pSrcReal[0];\n\n /* read input from complex input buffer */\n inA3 = pSrcCmplx[2];\n\n /* multiply complex buffer real input with real buffer input */\n out1 = inA1 * inB1;\n\n /* read input from complex input buffer */\n inA4 = pSrcCmplx[3];\n\n /* multiply complex buffer imaginary input with real buffer input */\n out2 = inA2 * inB1;\n\n /* read input from real input buffer */\n inB2 = pSrcReal[1];\n /* read input from complex input buffer */\n inA5 = pSrcCmplx[4];\n\n /* multiply complex buffer real input with real buffer input */\n out3 = inA3 * inB2;\n\n /* read input from complex input buffer */\n inA6 = pSrcCmplx[5];\n /* read input from real input buffer */\n inB3 = pSrcReal[2];\n\n /* multiply complex buffer imaginary input with real buffer input */\n out4 = inA4 * inB2;\n\n /* read input from complex input buffer */\n inA7 = pSrcCmplx[6];\n\n /* multiply complex buffer real input with real buffer input */\n out5 = inA5 * inB3;\n\n /* read input from complex input buffer */\n inA8 = pSrcCmplx[7];\n\n /* multiply complex buffer imaginary input with real buffer input */\n out6 = inA6 * inB3;\n\n /* read input from real input buffer */\n inB4 = pSrcReal[3];\n\n /* store result to destination bufer */\n pCmplxDst[0] = out1;\n\n /* multiply complex buffer real input with real buffer input */\n out7 = inA7 * inB4;\n\n /* store result to destination bufer */\n pCmplxDst[1] = out2;\n\n /* multiply complex buffer imaginary input with real buffer input */\n out8 = inA8 * inB4;\n\n /* store result to destination bufer */\n pCmplxDst[2] = out3;\n pCmplxDst[3] = out4;\n pCmplxDst[4] = out5;\n\n /* incremnet complex input buffer by 8 to process next samples */\n pSrcCmplx += 8U;\n\n /* store result to destination bufer */\n pCmplxDst[5] = out6;\n\n /* increment real input buffer by 4 to process next samples */\n pSrcReal += 4U;\n\n /* store result to destination bufer */\n pCmplxDst[6] = out7;\n pCmplxDst[7] = out8;\n\n /* increment destination buffer by 8 to process next sampels */\n pCmplxDst += 8U;\n\n /* Decrement the numSamples loop counter */\n blkCnt--;\n }\n\n /* If the numSamples is not a multiple of 4, compute any remaining output samples here.\n ** No loop unrolling is used. */\n blkCnt = numSamples % 0x4U;\n\n#else\n\n /* Run the below code for Cortex-M0 */\n blkCnt = numSamples;\n\n#endif /* #if defined (ARM_MATH_DSP) */\n\n while (blkCnt > 0U)\n {\n /* C[2 * i] = A[2 * i] * B[i]. */\n /* C[2 * i + 1] = A[2 * i + 1] * B[i]. */\n in = *pSrcReal++;\n /* store the result in the destination buffer. */\n *pCmplxDst++ = (*pSrcCmplx++) * (in);\n *pCmplxDst++ = (*pSrcCmplx++) * (in);\n\n /* Decrement the numSamples loop counter */\n blkCnt--;\n }\n}", "docstring": "/**\n * @ingroup groupCmplxMath\n */\n/**\n * @defgroup CmplxByRealMult Complex-by-Real Multiplication\n *\n * Multiplies a complex vector by a real vector and generates a complex result.\n * The data in the complex arrays is stored in an interleaved fashion\n * (real, imag, real, imag, ...).\n * The parameter numSamples represents the number of complex\n * samples processed. The complex arrays have a total of 2*numSamples\n * real values while the real array has a total of numSamples\n * real values.\n *\n * The underlying algorithm is used:\n *\n *
\n * for(n=0; n<numSamples; n++) {\n *     pCmplxDst[(2*n)+0] = pSrcCmplx[(2*n)+0] * pSrcReal[n];\n *     pCmplxDst[(2*n)+1] = pSrcCmplx[(2*n)+1] * pSrcReal[n];\n * }\n *\n * There are separate functions for floating-point, Q15, and Q31 data types.\n */\n/**\n * @addtogroup CmplxByRealMult\n * @{\n */\n/**\n * @brief  Floating-point complex-by-real multiplication\n * @param[in]  *pSrcCmplx points to the complex input vector\n * @param[in]  *pSrcReal points to the real input vector\n * @param[out]  *pCmplxDst points to the complex output vector\n * @param[in]  numSamples number of samples in each vector\n * @return none.\n */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/CMSIS/DSP/Source/ComplexMathFunctions/arm_cmplx_mult_real_f32.c#L73-L209", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_DCMI_EnableCrop", "code": "HAL_StatusTypeDef HAL_DCMI_EnableCrop(DCMI_HandleTypeDef *hdcmi)\n{\n  /* Process Locked */\n  __HAL_LOCK(hdcmi);\n\n  /* Lock the DCMI peripheral state */\n  hdcmi->State = HAL_DCMI_STATE_BUSY;\n\n  /* Enable DCMI Crop feature */\n  hdcmi->Instance->CR |= (uint32_t)DCMI_CR_CROP;\n\n  /* Change the DCMI state*/\n  hdcmi->State = HAL_DCMI_STATE_READY;\n\n  /* Process Unlocked */\n  __HAL_UNLOCK(hdcmi);\n\n  return HAL_OK;\n}", "docstring": "/**\n  * @brief  Enable the Crop feature.\n  * @param  hdcmi pointer to a DCMI_HandleTypeDef structure that contains\n  *                the configuration information for DCMI.\n  * @retval HAL status\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_hal_dcmi.c#L795-L813", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_DSI_Start", "code": "HAL_StatusTypeDef HAL_DSI_Start(DSI_HandleTypeDef *hdsi)\n{\n  /* Process locked */\n  __HAL_LOCK(hdsi);\n\n  /* Enable the DSI host */\n  __HAL_DSI_ENABLE(hdsi);\n\n  /* Enable the DSI wrapper */\n  __HAL_DSI_WRAPPER_ENABLE(hdsi);\n\n  /* Process unlocked */\n  __HAL_UNLOCK(hdsi);\n\n  return HAL_OK;\n}", "docstring": "/**\n  * @brief  Start the DSI module\n  * @param  hdsi  pointer to a DSI_HandleTypeDef structure that contains\n  *               the configuration information for the DSI.\n  * @retval HAL status\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_hal_dsi.c#L1452-L1467", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_FMPI2C_Master_Transmit_IT", "code": "HAL_StatusTypeDef HAL_FMPI2C_Master_Transmit_IT(FMPI2C_HandleTypeDef *hfmpi2c, uint16_t DevAddress, uint8_t *pData,\n                                             uint16_t Size)\n{\n  uint32_t xfermode;\n\n  if (hfmpi2c->State == HAL_FMPI2C_STATE_READY)\n  {\n    if (__HAL_FMPI2C_GET_FLAG(hfmpi2c, FMPI2C_FLAG_BUSY) == SET)\n    {\n      return HAL_BUSY;\n    }\n\n    /* Process Locked */\n    __HAL_LOCK(hfmpi2c);\n\n    hfmpi2c->State       = HAL_FMPI2C_STATE_BUSY_TX;\n    hfmpi2c->Mode        = HAL_FMPI2C_MODE_MASTER;\n    hfmpi2c->ErrorCode   = HAL_FMPI2C_ERROR_NONE;\n\n    /* Prepare transfer parameters */\n    hfmpi2c->pBuffPtr    = pData;\n    hfmpi2c->XferCount   = Size;\n    hfmpi2c->XferOptions = FMPI2C_NO_OPTION_FRAME;\n    hfmpi2c->XferISR     = FMPI2C_Master_ISR_IT;\n\n    if (hfmpi2c->XferCount > MAX_NBYTE_SIZE)\n    {\n      hfmpi2c->XferSize = MAX_NBYTE_SIZE;\n      xfermode = FMPI2C_RELOAD_MODE;\n    }\n    else\n    {\n      hfmpi2c->XferSize = hfmpi2c->XferCount;\n      xfermode = FMPI2C_AUTOEND_MODE;\n    }\n\n    /* Send Slave Address */\n    /* Set NBYTES to write and reload if hfmpi2c->XferCount > MAX_NBYTE_SIZE */\n    FMPI2C_TransferConfig(hfmpi2c, DevAddress, (uint8_t)hfmpi2c->XferSize, xfermode, FMPI2C_GENERATE_START_WRITE);\n\n    /* Process Unlocked */\n    __HAL_UNLOCK(hfmpi2c);\n\n    /* Note : The FMPI2C interrupts must be enabled after unlocking current process\n              to avoid the risk of FMPI2C interrupt handle execution before current\n              process unlock */\n\n    /* Enable ERR, TC, STOP, NACK, TXI interrupt */\n    /* possible to enable all of these */\n    /* FMPI2C_IT_ERRI | FMPI2C_IT_TCI | FMPI2C_IT_STOPI | FMPI2C_IT_NACKI |\n      FMPI2C_IT_ADDRI | FMPI2C_IT_RXI | FMPI2C_IT_TXI */\n    FMPI2C_Enable_IRQ(hfmpi2c, FMPI2C_XFER_TX_IT);\n\n    return HAL_OK;\n  }\n  else\n  {\n    return HAL_BUSY;\n  }\n}", "docstring": "/**\n  * @brief  Transmit in master mode an amount of data in non-blocking mode with Interrupt\n  * @param  hfmpi2c Pointer to a FMPI2C_HandleTypeDef structure that contains\n  *                the configuration information for the specified FMPI2C.\n  * @param  DevAddress Target device address: The device 7 bits address value\n  *         in datasheet must be shifted to the left before calling the interface\n  * @param  pData Pointer to data buffer\n  * @param  Size Amount of data to be sent\n  * @retval HAL status\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_hal_fmpi2c.c#L1614-L1673", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_I2SEx_TransmitReceive_DMA", "code": "HAL_StatusTypeDef HAL_I2SEx_TransmitReceive_DMA(I2S_HandleTypeDef *hi2s,\n                                                uint16_t *pTxData,\n                                                uint16_t *pRxData,\n                                                uint16_t Size)\n{\n  uint32_t *tmp = NULL;\n  uint32_t tmp1 = 0U;\n  HAL_StatusTypeDef errorcode = HAL_OK;\n\n  if (hi2s->State != HAL_I2S_STATE_READY)\n  {\n    errorcode = HAL_BUSY;\n    goto error;\n  }\n\n  if ((pTxData == NULL) || (pRxData == NULL) || (Size == 0U))\n  {\n    return  HAL_ERROR;\n  }\n\n  /* Process Locked */\n  __HAL_LOCK(hi2s);\n\n  hi2s->pTxBuffPtr = pTxData;\n  hi2s->pRxBuffPtr = pRxData;\n\n  tmp1 = hi2s->Instance->I2SCFGR & (SPI_I2SCFGR_DATLEN | SPI_I2SCFGR_CHLEN);\n  /* Check the Data format: When a 16-bit data frame or a 16-bit data frame extended\n  is selected during the I2S configuration phase, the Size parameter means the number\n  of 16-bit data length in the transaction and when a 24-bit data frame or a 32-bit data\n  frame is selected the Size parameter means the number of 16-bit data length. */\n  if ((tmp1 == I2S_DATAFORMAT_24B) || (tmp1 == I2S_DATAFORMAT_32B))\n  {\n    hi2s->TxXferSize  = (Size << 1U);\n    hi2s->TxXferCount = (Size << 1U);\n    hi2s->RxXferSize  = (Size << 1U);\n    hi2s->RxXferCount = (Size << 1U);\n  }\n  else\n  {\n    hi2s->TxXferSize  = Size;\n    hi2s->TxXferCount = Size;\n    hi2s->RxXferSize  = Size;\n    hi2s->RxXferCount = Size;\n  }\n\n  hi2s->ErrorCode = HAL_I2S_ERROR_NONE;\n  hi2s->State     = HAL_I2S_STATE_BUSY_TX_RX;\n\n  /* Set the I2S Rx DMA Half transfer complete callback */\n  hi2s->hdmarx->XferHalfCpltCallback = I2SEx_TxRxDMAHalfCplt;\n\n  /* Set the I2S Rx DMA transfer complete callback */\n  hi2s->hdmarx->XferCpltCallback  = I2SEx_TxRxDMACplt;\n\n  /* Set the I2S Rx DMA error callback */\n  hi2s->hdmarx->XferErrorCallback = I2SEx_TxRxDMAError;\n\n  /* Set the I2S Tx DMA Half transfer complete callback as NULL */\n  hi2s->hdmatx->XferHalfCpltCallback  = NULL;\n\n  /* Set the I2S Tx DMA transfer complete callback as NULL */\n  hi2s->hdmatx->XferCpltCallback  = NULL;\n\n  /* Set the I2S Tx DMA error callback */\n  hi2s->hdmatx->XferErrorCallback = I2SEx_TxRxDMAError;\n\n  tmp1 = hi2s->Instance->I2SCFGR & SPI_I2SCFGR_I2SCFG;\n  /* Check if the I2S_MODE_MASTER_TX or I2S_MODE_SLAVE_TX Mode is selected */\n  if ((tmp1 == I2S_MODE_MASTER_TX) || (tmp1 == I2S_MODE_SLAVE_TX))\n  {\n    /* Enable the Rx DMA Stream */\n    tmp = (uint32_t *)&pRxData;\n    HAL_DMA_Start_IT(hi2s->hdmarx, (uint32_t)&I2SxEXT(hi2s->Instance)->DR, *(uint32_t *)tmp, hi2s->RxXferSize);\n\n    /* Enable Rx DMA Request */\n    SET_BIT(I2SxEXT(hi2s->Instance)->CR2, SPI_CR2_RXDMAEN);\n\n    /* Enable the Tx DMA Stream */\n    tmp = (uint32_t *)&pTxData;\n    HAL_DMA_Start_IT(hi2s->hdmatx, *(uint32_t *)tmp, (uint32_t)&hi2s->Instance->DR, hi2s->TxXferSize);\n\n    /* Enable Tx DMA Request */\n    SET_BIT(hi2s->Instance->CR2, SPI_CR2_TXDMAEN);\n\n    /* Check if the I2S is already enabled */\n    if ((hi2s->Instance->I2SCFGR & SPI_I2SCFGR_I2SE) != SPI_I2SCFGR_I2SE)\n    {\n      /* Enable I2Sext(receiver) before enabling I2Sx peripheral */\n      __HAL_I2SEXT_ENABLE(hi2s);\n\n      /* Enable I2S peripheral after the I2Sext */\n      __HAL_I2S_ENABLE(hi2s);\n    }\n  }\n  else\n  {\n    /* Check if Master Receiver mode is selected */\n    if ((hi2s->Instance->I2SCFGR & 
SPI_I2SCFGR_I2SCFG) == I2S_MODE_MASTER_RX)\n    {\n      /* Clear the Overrun Flag by a read operation on the SPI_DR register followed by a read\n      access to the SPI_SR register. */\n      __HAL_I2S_CLEAR_OVRFLAG(hi2s);\n    }\n    /* Enable the Tx DMA Stream */\n    tmp = (uint32_t *)&pTxData;\n    HAL_DMA_Start_IT(hi2s->hdmatx, *(uint32_t *)tmp, (uint32_t)&I2SxEXT(hi2s->Instance)->DR, hi2s->TxXferSize);\n\n    /* Enable Tx DMA Request */\n    SET_BIT(I2SxEXT(hi2s->Instance)->CR2, SPI_CR2_TXDMAEN);\n\n    /* Enable the Rx DMA Stream */\n    tmp = (uint32_t *)&pRxData;\n    HAL_DMA_Start_IT(hi2s->hdmarx, (uint32_t)&hi2s->Instance->DR, *(uint32_t *)tmp, hi2s->RxXferSize);\n\n    /* Enable Rx DMA Request */\n    SET_BIT(hi2s->Instance->CR2, SPI_CR2_RXDMAEN);\n\n    /* Check if the I2S is already enabled */\n    if ((hi2s->Instance->I2SCFGR & SPI_I2SCFGR_I2SE) != SPI_I2SCFGR_I2SE)\n    {\n      /* Enable I2Sext(transmitter) before enabling I2Sx peripheral */\n      __HAL_I2SEXT_ENABLE(hi2s);\n      /* Enable I2S peripheral before the I2Sext */\n      __HAL_I2S_ENABLE(hi2s);\n    }\n  }\n\nerror :\n  __HAL_UNLOCK(hi2s);\n  return errorcode;\n}", "docstring": "/**\n  * @brief  Full-Duplex Transmit/Receive data in non-blocking mode using DMA\n  * @param  hi2s pointer to a I2S_HandleTypeDef structure that contains\n  *         the configuration information for I2S module\n  * @param  pTxData a 16-bit pointer to the Transmit data buffer.\n  * @param  pRxData a 16-bit pointer to the Receive data buffer.\n  * @param  Size number of data sample to be sent:\n  * @note   When a 16-bit data frame or a 16-bit data frame extended is selected during the I2S\n  *         configuration phase, the Size parameter means the number of 16-bit data length\n  *         in the transaction and when a 24-bit data frame or a 32-bit data frame is selected\n  *         the Size parameter means the number of 16-bit data length.\n  * @note   The I2S is kept enabled at the end of transaction to avoid the clock de-synchronization\n  *         between Master and Slave(example: audio streaming).\n  * @retval HAL status\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_hal_i2s_ex.c#L541-L672", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_LTDC_EnableDither", "code": "HAL_StatusTypeDef HAL_LTDC_EnableDither(LTDC_HandleTypeDef *hltdc)\n{\n  /* Process locked */\n  __HAL_LOCK(hltdc);\n\n  /* Change LTDC peripheral state */\n  hltdc->State = HAL_LTDC_STATE_BUSY;\n\n  /* Enable Dither by setting DTEN bit */\n  LTDC->GCR |= (uint32_t)LTDC_GCR_DEN;\n\n  /* Change the LTDC state*/\n  hltdc->State = HAL_LTDC_STATE_READY;\n\n  /* Process unlocked */\n  __HAL_UNLOCK(hltdc);\n\n  return HAL_OK;\n}", "docstring": "/**\n  * @brief  Enable Dither.\n  * @param  hltdc  pointer to a LTDC_HandleTypeDef structure that contains\n  *                the configuration information for the LTDC.\n  * @retval  HAL status\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_hal_ltdc.c#L1059-L1077", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_PCCARD_ITCallback", "code": "__weak void HAL_PCCARD_ITCallback(PCCARD_HandleTypeDef *hpccard)\n{\n  /* Prevent unused argument(s) compilation warning */\n  UNUSED(hpccard);\n  /* NOTE : This function Should not be modified, when the callback is needed,\n            the HAL_PCCARD_ITCallback could be implemented in the user file\n   */\n}", "docstring": "/**\n  * @brief  PCCARD interrupt feature callback\n  * @param  hpccard pointer to a PCCARD_HandleTypeDef structure that contains\n  *                the configuration information for PCCARD module.\n  * @retval None\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_hal_pccard.c#L678-L685", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_PWREx_DisableMainRegulatorLowVoltage", "code": "void HAL_PWREx_DisableMainRegulatorLowVoltage(void)\n{\n  *(__IO uint32_t *) CR_MRLVDS_BB = (uint32_t)DISABLE;\n}", "docstring": "/**\n  * @brief Disables Main Regulator low voltage mode.\n  * @note  This mode is only available for STM32F401xx/STM32F410xx/STM32F411xx/STM32F412Zx/STM32F412Rx/STM32F412Vx/STM32F412Cx/\n  *        STM32F413xx/STM32F423xxdevices. \n  * @retval None\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_hal_pwr_ex.c#L372-L375", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_RNG_ReadLastRandomNumber", "code": "uint32_t HAL_RNG_ReadLastRandomNumber(RNG_HandleTypeDef *hrng)\n{\n  return (hrng->RandomNumber);\n}", "docstring": "/**\n  * @brief  Read latest generated random number.\n  * @param  hrng pointer to a RNG_HandleTypeDef structure that contains\n  *                the configuration information for RNG.\n  * @retval random value\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_hal_rng.c#L771-L774", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_SMBUS_UnRegisterCallback", "code": "HAL_StatusTypeDef HAL_SMBUS_UnRegisterCallback(SMBUS_HandleTypeDef *hsmbus, HAL_SMBUS_CallbackIDTypeDef CallbackID)\n{\n  HAL_StatusTypeDef status = HAL_OK;\n\n  /* Process locked */\n  __HAL_LOCK(hsmbus);\n\n  if (HAL_SMBUS_STATE_READY == hsmbus->State)\n  {\n    switch (CallbackID)\n    {\n      case HAL_SMBUS_MASTER_TX_COMPLETE_CB_ID :\n        hsmbus->MasterTxCpltCallback = HAL_SMBUS_MasterTxCpltCallback; /* Legacy weak MasterTxCpltCallback */\n        break;\n\n      case HAL_SMBUS_MASTER_RX_COMPLETE_CB_ID :\n        hsmbus->MasterRxCpltCallback = HAL_SMBUS_MasterRxCpltCallback; /* Legacy weak MasterRxCpltCallback */\n        break;\n\n      case HAL_SMBUS_SLAVE_TX_COMPLETE_CB_ID :\n        hsmbus->SlaveTxCpltCallback = HAL_SMBUS_SlaveTxCpltCallback;   /* Legacy weak SlaveTxCpltCallback  */\n        break;\n\n      case HAL_SMBUS_SLAVE_RX_COMPLETE_CB_ID :\n        hsmbus->SlaveRxCpltCallback = HAL_SMBUS_SlaveRxCpltCallback;   /* Legacy weak SlaveRxCpltCallback  */\n        break;\n\n      case HAL_SMBUS_LISTEN_COMPLETE_CB_ID :\n        hsmbus->ListenCpltCallback = HAL_SMBUS_ListenCpltCallback;     /* Legacy weak ListenCpltCallback   */\n        break;\n\n      case HAL_SMBUS_ERROR_CB_ID :\n        hsmbus->ErrorCallback = HAL_SMBUS_ErrorCallback;               /* Legacy weak ErrorCallback        */\n        break;\n\n      case HAL_SMBUS_ABORT_CB_ID :\n        hsmbus->AbortCpltCallback = HAL_SMBUS_AbortCpltCallback;       /* Legacy weak AbortCpltCallback    */\n        break;\n\n      case HAL_SMBUS_MSPINIT_CB_ID :\n        hsmbus->MspInitCallback = HAL_SMBUS_MspInit;                   /* Legacy weak MspInit              */\n        break;\n\n      case HAL_SMBUS_MSPDEINIT_CB_ID :\n        hsmbus->MspDeInitCallback = HAL_SMBUS_MspDeInit;               /* Legacy weak MspDeInit            */\n        break;\n\n      default :\n        /* Update the error code */\n        hsmbus->ErrorCode |= HAL_SMBUS_ERROR_INVALID_CALLBACK;\n\n        /* Return error status */\n        status =  HAL_ERROR;\n        break;\n    }\n  }\n  else if (HAL_SMBUS_STATE_RESET == hsmbus->State)\n  {\n    switch (CallbackID)\n    {\n      case HAL_SMBUS_MSPINIT_CB_ID :\n        hsmbus->MspInitCallback = HAL_SMBUS_MspInit;                   /* Legacy weak MspInit              */\n        break;\n\n      case HAL_SMBUS_MSPDEINIT_CB_ID :\n        hsmbus->MspDeInitCallback = HAL_SMBUS_MspDeInit;               /* Legacy weak MspDeInit            */\n        break;\n\n      default :\n        /* Update the error code */\n        hsmbus->ErrorCode |= HAL_SMBUS_ERROR_INVALID_CALLBACK;\n\n        /* Return error status */\n        status =  HAL_ERROR;\n        break;\n    }\n  }\n  else\n  {\n    /* Update the error code */\n    hsmbus->ErrorCode |= HAL_SMBUS_ERROR_INVALID_CALLBACK;\n\n    /* Return error status */\n    status =  HAL_ERROR;\n  }\n\n  /* Release Lock */\n  __HAL_UNLOCK(hsmbus);\n  return status;\n}", "docstring": "/**\n  * @brief  Unregister an SMBUS Callback\n  *         SMBUS callback is redirected to the weak predefined callback\n  * @param  hsmbus Pointer to a SMBUS_HandleTypeDef structure that contains\n  *                the configuration information for the specified SMBUS.\n  * @param  CallbackID ID of the callback to be unregistered\n  *         This parameter can be one of the following values:\n  *         This parameter can be one of the following values:\n  *          @arg 
@ref HAL_SMBUS_MASTER_TX_COMPLETE_CB_ID Master Tx Transfer completed callback ID\n  *          @arg @ref HAL_SMBUS_MASTER_RX_COMPLETE_CB_ID Master Rx Transfer completed callback ID\n  *          @arg @ref HAL_SMBUS_SLAVE_TX_COMPLETE_CB_ID Slave Tx Transfer completed callback ID\n  *          @arg @ref HAL_SMBUS_SLAVE_RX_COMPLETE_CB_ID Slave Rx Transfer completed callback ID\n  *          @arg @ref HAL_SMBUS_LISTEN_COMPLETE_CB_ID Listen Complete callback ID\n  *          @arg @ref HAL_SMBUS_ERROR_CB_ID Error callback ID\n  *          @arg @ref HAL_SMBUS_ABORT_CB_ID Abort callback ID\n  *          @arg @ref HAL_SMBUS_MSPINIT_CB_ID MspInit callback ID\n  *          @arg @ref HAL_SMBUS_MSPDEINIT_CB_ID MspDeInit callback ID\n  * @retval HAL status\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_hal_smbus.c#L687-L776", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LL_DMA_Init", "code": "uint32_t LL_DMA_Init(DMA_TypeDef *DMAx, uint32_t Stream, LL_DMA_InitTypeDef *DMA_InitStruct)\n{\n  /* Check the DMA Instance DMAx and Stream parameters*/\n  assert_param(IS_LL_DMA_ALL_STREAM_INSTANCE(DMAx, Stream));\n\n  /* Check the DMA parameters from DMA_InitStruct */\n  assert_param(IS_LL_DMA_DIRECTION(DMA_InitStruct->Direction));\n  assert_param(IS_LL_DMA_MODE(DMA_InitStruct->Mode));\n  assert_param(IS_LL_DMA_PERIPHINCMODE(DMA_InitStruct->PeriphOrM2MSrcIncMode));\n  assert_param(IS_LL_DMA_MEMORYINCMODE(DMA_InitStruct->MemoryOrM2MDstIncMode));\n  assert_param(IS_LL_DMA_PERIPHDATASIZE(DMA_InitStruct->PeriphOrM2MSrcDataSize));\n  assert_param(IS_LL_DMA_MEMORYDATASIZE(DMA_InitStruct->MemoryOrM2MDstDataSize));\n  assert_param(IS_LL_DMA_NBDATA(DMA_InitStruct->NbData));\n  assert_param(IS_LL_DMA_CHANNEL(DMA_InitStruct->Channel));\n  assert_param(IS_LL_DMA_PRIORITY(DMA_InitStruct->Priority));\n  assert_param(IS_LL_DMA_FIFO_MODE_STATE(DMA_InitStruct->FIFOMode));\n  /* Check the memory burst, peripheral burst and FIFO threshold parameters only\n     when FIFO mode is enabled */\n  if(DMA_InitStruct->FIFOMode != LL_DMA_FIFOMODE_DISABLE)\n  {\n    assert_param(IS_LL_DMA_FIFO_THRESHOLD(DMA_InitStruct->FIFOThreshold));\n    assert_param(IS_LL_DMA_MEMORY_BURST(DMA_InitStruct->MemBurst));\n    assert_param(IS_LL_DMA_PERIPHERAL_BURST(DMA_InitStruct->PeriphBurst));\n  }\n\n  /*---------------------------- DMAx SxCR Configuration ------------------------\n   * Configure DMAx_Streamy: data transfer direction, data transfer mode,\n   *                          peripheral and memory increment mode,\n   *                          data size alignment and  priority level with parameters :\n   * - Direction:      DMA_SxCR_DIR[1:0] bits\n   * - Mode:           DMA_SxCR_CIRC bit\n   * - PeriphOrM2MSrcIncMode:  DMA_SxCR_PINC bit\n   * - MemoryOrM2MDstIncMode:  DMA_SxCR_MINC bit\n   * - PeriphOrM2MSrcDataSize: DMA_SxCR_PSIZE[1:0] bits\n   * - MemoryOrM2MDstDataSize: DMA_SxCR_MSIZE[1:0] bits\n   * - Priority:               DMA_SxCR_PL[1:0] bits\n   */\n  LL_DMA_ConfigTransfer(DMAx, Stream, DMA_InitStruct->Direction | \\\n                        DMA_InitStruct->Mode                    | \\\n                        DMA_InitStruct->PeriphOrM2MSrcIncMode   | \\\n                        DMA_InitStruct->MemoryOrM2MDstIncMode   | \\\n                        DMA_InitStruct->PeriphOrM2MSrcDataSize  | \\\n                        DMA_InitStruct->MemoryOrM2MDstDataSize  | \\\n                        DMA_InitStruct->Priority\n                        );\n\n  if(DMA_InitStruct->FIFOMode != LL_DMA_FIFOMODE_DISABLE)\n  {\n    /*---------------------------- DMAx SxFCR Configuration ------------------------\n     * Configure DMAx_Streamy:  fifo mode and fifo threshold with parameters :\n     * - FIFOMode:                DMA_SxFCR_DMDIS bit\n     * - FIFOThreshold:           DMA_SxFCR_FTH[1:0] bits\n     */\n    LL_DMA_ConfigFifo(DMAx, Stream, DMA_InitStruct->FIFOMode, DMA_InitStruct->FIFOThreshold);   \n\n    /*---------------------------- DMAx SxCR Configuration --------------------------\n     * Configure DMAx_Streamy:  memory burst transfer with parameters :\n     * - MemBurst:                DMA_SxCR_MBURST[1:0] bits\n     */\n    LL_DMA_SetMemoryBurstxfer(DMAx,Stream,DMA_InitStruct->MemBurst); \n\n    /*---------------------------- DMAx SxCR Configuration --------------------------\n     * Configure DMAx_Streamy:  
peripheral burst transfer with parameters :\n     * - PeriphBurst:             DMA_SxCR_PBURST[1:0] bits\n     */\n    LL_DMA_SetPeriphBurstxfer(DMAx,Stream,DMA_InitStruct->PeriphBurst);\n  }\n\n  /*-------------------------- DMAx SxM0AR Configuration --------------------------\n   * Configure the memory or destination base address with parameter :\n   * - MemoryOrM2MDstAddress:     DMA_SxM0AR_M0A[31:0] bits\n   */\n  LL_DMA_SetMemoryAddress(DMAx, Stream, DMA_InitStruct->MemoryOrM2MDstAddress);\n\n  /*-------------------------- DMAx SxPAR Configuration ---------------------------\n   * Configure the peripheral or source base address with parameter :\n   * - PeriphOrM2MSrcAddress:     DMA_SxPAR_PA[31:0] bits\n   */\n  LL_DMA_SetPeriphAddress(DMAx, Stream, DMA_InitStruct->PeriphOrM2MSrcAddress);\n\n  /*--------------------------- DMAx SxNDTR Configuration -------------------------\n   * Configure the peripheral base address with parameter :\n   * - NbData:                    DMA_SxNDT[15:0] bits\n   */\n  LL_DMA_SetDataLength(DMAx, Stream, DMA_InitStruct->NbData);\n\n  /*--------------------------- DMA SxCR_CHSEL Configuration ----------------------\n   * Configure the peripheral base address with parameter :\n   * - PeriphRequest:             DMA_SxCR_CHSEL[2:0] bits\n   */\n  LL_DMA_SetChannelSelection(DMAx, Stream, DMA_InitStruct->Channel);\n\n  return SUCCESS;\n}", "docstring": "/**\n  * @brief  Initialize the DMA registers according to the specified parameters in DMA_InitStruct.\n  * @note   To convert DMAx_Streamy Instance to DMAx Instance and Streamy, use helper macros :\n  *         @arg @ref __LL_DMA_GET_INSTANCE\n  *         @arg @ref __LL_DMA_GET_STREAM\n  * @param  DMAx DMAx Instance\n  * @param  Stream This parameter can be one of the following values:\n  *         @arg @ref LL_DMA_STREAM_0\n  *         @arg @ref LL_DMA_STREAM_1\n  *         @arg @ref LL_DMA_STREAM_2\n  *         @arg @ref LL_DMA_STREAM_3\n  *         @arg @ref LL_DMA_STREAM_4\n  *         @arg @ref LL_DMA_STREAM_5\n  *         @arg @ref LL_DMA_STREAM_6\n  *         @arg @ref LL_DMA_STREAM_7\n  * @param  DMA_InitStruct pointer to a @ref LL_DMA_InitTypeDef structure.\n  * @retval An ErrorStatus enumeration value:\n  *          - SUCCESS: DMA registers are initialized\n  *          - ERROR: Not applicable\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_ll_dma.c#L285-L378", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FMC_NAND_Init", "code": "HAL_StatusTypeDef FMC_NAND_Init(FMC_NAND_TypeDef *Device, FMC_NAND_InitTypeDef *Init)\n{\n  uint32_t tmpr  = 0U; \n    \n  /* Check the parameters */\n  assert_param(IS_FMC_NAND_DEVICE(Device));\n  assert_param(IS_FMC_NAND_BANK(Init->NandBank));\n  assert_param(IS_FMC_WAIT_FEATURE(Init->Waitfeature));\n  assert_param(IS_FMC_NAND_MEMORY_WIDTH(Init->MemoryDataWidth));\n  assert_param(IS_FMC_ECC_STATE(Init->EccComputation));\n  assert_param(IS_FMC_ECCPAGE_SIZE(Init->ECCPageSize));\n  assert_param(IS_FMC_TCLR_TIME(Init->TCLRSetupTime));\n  assert_param(IS_FMC_TAR_TIME(Init->TARSetupTime));   \n\n  if(Init->NandBank == FMC_NAND_BANK2)\n  {\n    /* Get the NAND bank 2 register value */\n    tmpr = Device->PCR2;\n  }\n  else\n  {\n    /* Get the NAND bank 3 register value */\n    tmpr = Device->PCR3;\n  }\n  \n  /* Clear PWAITEN, PBKEN, PTYP, PWID, ECCEN, TCLR, TAR and ECCPS bits */\n  tmpr &= ((uint32_t)~(FMC_PCR2_PWAITEN  | FMC_PCR2_PBKEN | FMC_PCR2_PTYP | \\\n                       FMC_PCR2_PWID | FMC_PCR2_ECCEN | FMC_PCR2_TCLR | \\\n                       FMC_PCR2_TAR | FMC_PCR2_ECCPS));  \n  \n  /* Set NAND device control parameters */\n  tmpr |= (uint32_t)(Init->Waitfeature                 |\\\n                     FMC_PCR_MEMORY_TYPE_NAND          |\\\n                     Init->MemoryDataWidth             |\\\n                     Init->EccComputation              |\\\n                     Init->ECCPageSize                 |\\\n                     ((Init->TCLRSetupTime) << 9U)     |\\\n                     ((Init->TARSetupTime) << 13U));   \n  \n  if(Init->NandBank == FMC_NAND_BANK2)\n  {\n    /* NAND bank 2 registers configuration */\n    Device->PCR2  = tmpr;\n  }\n  else\n  {\n    /* NAND bank 3 registers configuration */\n    Device->PCR3  = tmpr;\n  }\n  \n  return HAL_OK;\n\n}", "docstring": "/* defined(STM32F427xx) || defined(STM32F437xx) || defined(STM32F429xx) || defined(STM32F439xx) */\n/** @defgroup HAL_FMC_NAND_Group1 Initialization/de-initialization functions \n *  @brief    Initialization and Configuration functions \n *\n@verbatim    \n  ==============================================================================\n              ##### Initialization and de_initialization functions #####\n  ==============================================================================\n  [..]  \n    This section provides functions allowing to:\n    (+) Initialize and configure the FMC NAND interface\n    (+) De-initialize the FMC NAND interface \n    (+) Configure the FMC clock and associated GPIOs\n        \n@endverbatim\n  * @{\n  */\n/**\n  * @brief  Initializes the FMC_NAND device according to the specified\n  *         control parameters in the FMC_NAND_HandleTypeDef\n  * @param  Device Pointer to NAND device instance\n  * @param  Init Pointer to NAND Initialization structure\n  * @retval HAL status\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_ll_fmc.c#L754-L806", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LL_LPTIM_StructInit", "code": "void LL_LPTIM_StructInit(LL_LPTIM_InitTypeDef *LPTIM_InitStruct)\n{\n  /* Set the default configuration */\n  LPTIM_InitStruct->ClockSource = LL_LPTIM_CLK_SOURCE_INTERNAL;\n  LPTIM_InitStruct->Prescaler   = LL_LPTIM_PRESCALER_DIV1;\n  LPTIM_InitStruct->Waveform    = LL_LPTIM_OUTPUT_WAVEFORM_PWM;\n  LPTIM_InitStruct->Polarity    = LL_LPTIM_OUTPUT_POLARITY_REGULAR;\n}", "docstring": "/**\n  * @brief  Set each fields of the LPTIM_InitStruct structure to its default\n  *         value.\n  * @param  LPTIM_InitStruct pointer to a @ref LL_LPTIM_InitTypeDef structure\n  * @retval None\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_ll_lptim.c#L122-L129", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "USB_DisableGlobalInt", "code": "HAL_StatusTypeDef USB_DisableGlobalInt(USB_OTG_GlobalTypeDef *USBx)\n{\n  USBx->GAHBCFG &= ~USB_OTG_GAHBCFG_GINT;\n  return HAL_OK;\n}", "docstring": "/**\n  * @brief  USB_DisableGlobalInt\n  *         Disable the controller's Global Int in the AHB Config reg\n  * @param  USBx  Selected device\n  * @retval HAL status\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_ll_usb.c#L234-L238", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "USB_ResetPort", "code": "HAL_StatusTypeDef USB_ResetPort(USB_OTG_GlobalTypeDef *USBx)\n{\n  uint32_t USBx_BASE = (uint32_t)USBx;\n\n  __IO uint32_t hprt0 = 0U;\n\n  hprt0 = USBx_HPRT0;\n\n  hprt0 &= ~(USB_OTG_HPRT_PENA | USB_OTG_HPRT_PCDET |\n             USB_OTG_HPRT_PENCHNG | USB_OTG_HPRT_POCCHNG);\n\n  USBx_HPRT0 = (USB_OTG_HPRT_PRST | hprt0);\n  HAL_Delay(100U);                                 /* See Note #1 */\n  USBx_HPRT0 = ((~USB_OTG_HPRT_PRST) & hprt0);\n  HAL_Delay(10U);\n\n  return HAL_OK;\n}", "docstring": "/**\n  * @brief  USB_OTG_ResetPort : Reset Host Port\n  * @param  USBx  Selected device\n  * @retval HAL status\n  * @note (1)The application must wait at least 10 ms\n  *   before clearing the reset bit.\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Drivers/STM32F4xx_HAL_Driver/Src/stm32f4xx_ll_usb.c#L1532-L1549", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "mpu_set_accel_fsr", "code": "int mpu_set_accel_fsr(unsigned char fsr)\n{\n    unsigned char data;\n\n    if (!(st.chip_cfg.sensors))\n        return -1;\n\n    switch (fsr) {\n    case 2:\n        data = INV_FSR_2G << 3;\n        break;\n    case 4:\n        data = INV_FSR_4G << 3;\n        break;\n    case 8:\n        data = INV_FSR_8G << 3;\n        break;\n    case 16:\n        data = INV_FSR_16G << 3;\n        break;\n    default:\n        return -1;\n    }\n\n    if (st.chip_cfg.accel_fsr == (data >> 3))\n        return 0;\n    if (i2c_write( st.reg->accel_cfg, 1, &data))\n        return -1;\n    st.chip_cfg.accel_fsr = data >> 3;\n    return 0;\n}", "docstring": "/**\n *  @brief      Set the accel full-scale range.\n *  @param[in]  fsr Desired full-scale range.\n *  @return     0 if successful.\n */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/GearLL/Drivers/Devices/MPU6050/inv_mpu.c#L1248-L1278", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "mpu_set_int_level", "code": "int mpu_set_int_level(unsigned char active_low)\n{\n    st.chip_cfg.active_low_int = active_low;\n    return 0;\n}", "docstring": "/**\n *  @brief      Set interrupt level.\n *  @param[in]  active_low  1 for active low, 0 for active high.\n *  @return     0 if successful.\n */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/GearLL/Drivers/Devices/MPU6050/inv_mpu.c#L1869-L1873", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "mpu_get_compass_fsr", "code": "int mpu_get_compass_fsr(unsigned short *fsr)\n{\n#ifdef AK89xx_SECONDARY\n    fsr[0] = st.hw->compass_fsr;\n    return 0;\n#else\n    return -1;\n#endif\n}", "docstring": "/**\n *  @brief      Get the compass full-scale range.\n *  @param[out] fsr Current full-scale range.\n *  @return     0 if successful.\n */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/GearLL/Drivers/Devices/MPU6050/inv_mpu.c#L2590-L2598", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SEGGER_SYSVIEW_RecordU32x4", "code": "void SEGGER_SYSVIEW_RecordU32x4(unsigned int EventID, U32 Para0, U32 Para1, U32 Para2, U32 Para3) {\n  U8* pPayload;\n  U8* pPayloadStart;\n  RECORD_START(SEGGER_SYSVIEW_INFO_SIZE + 4 * SEGGER_SYSVIEW_QUANTA_U32);\n  //\n  pPayload = pPayloadStart;\n  ENCODE_U32(pPayload, Para0);\n  ENCODE_U32(pPayload, Para1);\n  ENCODE_U32(pPayload, Para2);\n  ENCODE_U32(pPayload, Para3);\n  _SendPacket(pPayloadStart, pPayload, EventID);\n  RECORD_END();\n}", "docstring": "/*********************************************************************\n*\n*       SEGGER_SYSVIEW_RecordU32x4()\n*\n*  Function description\n*    Formats and sends a SystemView packet containing 4 U32 parameter payload.\n*\n*  Parameters\n*    EventID - SystemView event ID.\n*    Para0   - The 32-bit parameter encoded to SystemView packet payload.\n*    Para1   - The 32-bit parameter encoded to SystemView packet payload.\n*    Para2   - The 32-bit parameter encoded to SystemView packet payload.\n*    Para3   - The 32-bit parameter encoded to SystemView packet payload.\n*/", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone-F4/Middlewares/SEGGER/SEGGER_SYSVIEW.c#L1389-L1401", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "I2C_MasterReceive_RXNE", "code": "static void I2C_MasterReceive_RXNE(I2C_HandleTypeDef *hi2c)\n{\n  if (hi2c->State == HAL_I2C_STATE_BUSY_RX)\n  {\n    uint32_t tmp;\n\n    tmp = hi2c->XferCount;\n    if (tmp > 3U)\n    {\n      /* Read data from DR */\n      *hi2c->pBuffPtr = (uint8_t)hi2c->Instance->DR;\n\n      /* Increment Buffer pointer */\n      hi2c->pBuffPtr++;\n\n      /* Update counter */\n      hi2c->XferCount--;\n\n      if (hi2c->XferCount == (uint16_t)3)\n      {\n        /* Disable BUF interrupt, this help to treat correctly the last 4 bytes\n        on BTF subroutine */\n        /* Disable BUF interrupt */\n        __HAL_I2C_DISABLE_IT(hi2c, I2C_IT_BUF);\n      }\n    }\n    else if ((hi2c->XferOptions != I2C_FIRST_AND_NEXT_FRAME) && ((tmp == 1U) || (tmp == 0U)))\n    {\n      if (I2C_WaitOnSTOPRequestThroughIT(hi2c) == HAL_OK)\n      {\n        /* Disable Acknowledge */\n        CLEAR_BIT(hi2c->Instance->CR1, I2C_CR1_ACK);\n\n        /* Disable EVT, BUF and ERR interrupt */\n        __HAL_I2C_DISABLE_IT(hi2c, I2C_IT_EVT | I2C_IT_BUF | I2C_IT_ERR);\n\n        /* Read data from DR */\n        *hi2c->pBuffPtr = (uint8_t)hi2c->Instance->DR;\n\n        /* Increment Buffer pointer */\n        hi2c->pBuffPtr++;\n\n        /* Update counter */\n        hi2c->XferCount--;\n\n        hi2c->State = HAL_I2C_STATE_READY;\n\n        if (hi2c->Mode == HAL_I2C_MODE_MEM)\n        {\n          hi2c->Mode = HAL_I2C_MODE_NONE;\n          hi2c->PreviousState = I2C_STATE_NONE;\n\n#if (USE_HAL_I2C_REGISTER_CALLBACKS == 1)\n          hi2c->MemRxCpltCallback(hi2c);\n#else\n          HAL_I2C_MemRxCpltCallback(hi2c);\n#endif /* USE_HAL_I2C_REGISTER_CALLBACKS */\n        }\n        else\n        {\n          hi2c->Mode = HAL_I2C_MODE_NONE;\n          hi2c->PreviousState = I2C_STATE_MASTER_BUSY_RX;\n\n#if (USE_HAL_I2C_REGISTER_CALLBACKS == 1)\n          hi2c->MasterRxCpltCallback(hi2c);\n#else\n          HAL_I2C_MasterRxCpltCallback(hi2c);\n#endif /* USE_HAL_I2C_REGISTER_CALLBACKS */\n        }\n      }\n      else\n      {\n        /* Disable EVT, BUF and ERR interrupt */\n        __HAL_I2C_DISABLE_IT(hi2c, I2C_IT_EVT | I2C_IT_BUF | I2C_IT_ERR);\n\n        /* Read data from DR */\n        *hi2c->pBuffPtr = (uint8_t)hi2c->Instance->DR;\n\n        /* Increment Buffer pointer */\n        hi2c->pBuffPtr++;\n\n        /* Update counter */\n        hi2c->XferCount--;\n\n        hi2c->State = HAL_I2C_STATE_READY;\n        hi2c->Mode = HAL_I2C_MODE_NONE;\n\n        /* Call user error callback */\n#if (USE_HAL_I2C_REGISTER_CALLBACKS == 1)\n        hi2c->ErrorCallback(hi2c);\n#else\n        HAL_I2C_ErrorCallback(hi2c);\n#endif /* USE_HAL_I2C_REGISTER_CALLBACKS */\n      }\n    }\n    else\n    {\n      /* Do nothing */\n    }\n  }\n}", "docstring": "/**\n  * @brief  Handle RXNE flag for Master\n  * @param  hi2c Pointer to a I2C_HandleTypeDef structure that contains\n  *         the configuration information for I2C module\n  * @retval None\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone_Lite/Drivers/STM32F1xx_HAL_Driver/Src/stm32f1xx_hal_i2c.c#L5615-L5715", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UART_WaitOnFlagUntilTimeout", "code": "static HAL_StatusTypeDef UART_WaitOnFlagUntilTimeout(UART_HandleTypeDef *huart, uint32_t Flag, FlagStatus Status, uint32_t Tickstart, uint32_t Timeout)\n{\n  /* Wait until flag is set */\n  while ((__HAL_UART_GET_FLAG(huart, Flag) ? SET : RESET) == Status)\n  {\n    /* Check for the Timeout */\n    if (Timeout != HAL_MAX_DELAY)\n    {\n      if ((Timeout == 0U) || ((HAL_GetTick() - Tickstart) > Timeout))\n      {\n        /* Disable TXE, RXNE, PE and ERR (Frame error, noise error, overrun error) interrupts for the interrupt process */\n        CLEAR_BIT(huart->Instance->CR1, (USART_CR1_RXNEIE | USART_CR1_PEIE | USART_CR1_TXEIE));\n        CLEAR_BIT(huart->Instance->CR3, USART_CR3_EIE);\n\n        huart->gState  = HAL_UART_STATE_READY;\n        huart->RxState = HAL_UART_STATE_READY;\n\n        /* Process Unlocked */\n        __HAL_UNLOCK(huart);\n\n        return HAL_TIMEOUT;\n      }\n    }\n  }\n  return HAL_OK;\n}", "docstring": "/**\n  * @brief  This function handles UART Communication Timeout.\n  * @param  huart  Pointer to a UART_HandleTypeDef structure that contains\n  *                the configuration information for the specified UART module.\n  * @param  Flag specifies the UART flag to check.\n  * @param  Status The new Flag status (SET or RESET).\n  * @param  Tickstart Tick start value\n  * @param  Timeout Timeout duration\n  * @retval HAL status\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone_Lite/Drivers/STM32F1xx_HAL_Driver/Src/stm32f1xx_hal_uart.c#L3167-L3192", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SEGGER_RTT_WriteNoLock", "code": "unsigned SEGGER_RTT_WriteNoLock(unsigned BufferIndex, const void* pBuffer, unsigned NumBytes) {\n  unsigned              Status;\n  unsigned              Avail;\n  const char*           pData;\n  SEGGER_RTT_BUFFER_UP* pRing;\n\n  pData = (const char *)pBuffer;\n  //\n  // Get \"to-host\" ring buffer.\n  //\n  pRing = &_SEGGER_RTT.aUp[BufferIndex];\n  //\n  // How we output depends upon the mode...\n  //\n  switch (pRing->Flags) {\n  case SEGGER_RTT_MODE_NO_BLOCK_SKIP:\n    //\n    // If we are in skip mode and there is no space for the whole\n    // of this output, don't bother.\n    //\n    Avail = _GetAvailWriteSpace(pRing);\n    if (Avail < NumBytes) {\n      Status = 0u;\n    } else {\n      Status = NumBytes;\n      _WriteNoCheck(pRing, pData, NumBytes);\n    }\n    break;\n  case SEGGER_RTT_MODE_NO_BLOCK_TRIM:\n    //\n    // If we are in trim mode, trim to what we can output without blocking.\n    //\n    Avail = _GetAvailWriteSpace(pRing);\n    Status = Avail < NumBytes ? Avail : NumBytes;\n    _WriteNoCheck(pRing, pData, Status);\n    break;\n  case SEGGER_RTT_MODE_BLOCK_IF_FIFO_FULL:\n    //\n    // If we are in blocking mode, output everything.\n    //\n    Status = _WriteBlocking(pRing, pData, NumBytes);\n    break;\n  default:\n    Status = 0u;\n    break;\n  }\n  //\n  // Finish up.\n  //\n  return Status;\n}", "docstring": "/*********************************************************************\n*\n*       SEGGER_RTT_WriteNoLock\n*\n*  Function description\n*    Stores a specified number of characters in SEGGER RTT\n*    control block which is then read by the host.\n*    SEGGER_RTT_WriteNoLock does not lock the application.\n*\n*  Parameters\n*    BufferIndex  Index of \"Up\"-buffer to be used (e.g. 0 for \"Terminal\").\n*    pBuffer      Pointer to character array. Does not need to point to a \\0 terminated string.\n*    NumBytes     Number of bytes to be stored in the SEGGER RTT control block.\n*\n*  Return value\n*    Number of bytes which have been stored in the \"Up\"-buffer.\n*\n*  Notes\n*    (1) Data is stored according to buffer flags.\n*    (2) For performance reasons this function does not call Init()\n*        and may only be called after RTT has been initialized.\n*        Either by calling SEGGER_RTT_Init() or calling another RTT API function first.\n*/", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone_Lite/Middlewares/SEGGER/SEGGER_RTT.c#L895-L945", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SEGGER_SYSVIEW_RecordU32x4", "code": "void SEGGER_SYSVIEW_RecordU32x4(unsigned int EventID, U32 Para0, U32 Para1, U32 Para2, U32 Para3) {\n  U8* pPayload;\n  U8* pPayloadStart;\n  RECORD_START(SEGGER_SYSVIEW_INFO_SIZE + 4 * SEGGER_SYSVIEW_QUANTA_U32);\n  //\n  pPayload = pPayloadStart;\n  ENCODE_U32(pPayload, Para0);\n  ENCODE_U32(pPayload, Para1);\n  ENCODE_U32(pPayload, Para2);\n  ENCODE_U32(pPayload, Para3);\n  _SendPacket(pPayloadStart, pPayload, EventID);\n  RECORD_END();\n}", "docstring": "/*********************************************************************\n*\n*       SEGGER_SYSVIEW_RecordU32x4()\n*\n*  Function description\n*    Formats and sends a SystemView packet containing 4 U32 parameter payload.\n*\n*  Parameters\n*    EventID - SystemView event ID.\n*    Para0   - The 32-bit parameter encoded to SystemView packet payload.\n*    Para1   - The 32-bit parameter encoded to SystemView packet payload.\n*    Para2   - The 32-bit parameter encoded to SystemView packet payload.\n*    Para3   - The 32-bit parameter encoded to SystemView packet payload.\n*/", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone_Lite/Middlewares/SEGGER/SEGGER_SYSVIEW.c#L1389-L1401", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_I2C_MspDeInit", "code": "void HAL_I2C_MspDeInit(I2C_HandleTypeDef* hi2c)\n{\n  if(hi2c->Instance==I2C1)\n  {\n  /* USER CODE BEGIN I2C1_MspDeInit 0 */\n\n  /* USER CODE END I2C1_MspDeInit 0 */\n    /* Peripheral clock disable */\n    __HAL_RCC_I2C1_CLK_DISABLE();\n\n    /**I2C1 GPIO Configuration\n    PB6     ------> I2C1_SCL\n    PB7     ------> I2C1_SDA\n    */\n    HAL_GPIO_DeInit(GPIOB, GPIO_PIN_6);\n\n    HAL_GPIO_DeInit(GPIOB, GPIO_PIN_7);\n\n  /* USER CODE BEGIN I2C1_MspDeInit 1 */\n\n  /* USER CODE END I2C1_MspDeInit 1 */\n  }\n\n}", "docstring": "/**\n* @brief I2C MSP De-Initialization\n* This function freeze the hardware resources used in this example\n* @param hi2c: I2C handle pointer\n* @retval None\n*/", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARdrone_Lite/Src/stm32f1xx_hal_msp.c#L218-L241", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "I2C_WaitOnBTFFlagUntilTimeout", "code": "static HAL_StatusTypeDef I2C_WaitOnBTFFlagUntilTimeout(I2C_HandleTypeDef *hi2c, uint32_t Timeout, uint32_t Tickstart)\n{\n  while (__HAL_I2C_GET_FLAG(hi2c, I2C_FLAG_BTF) == RESET)\n  {\n    /* Check if a NACK is detected */\n    if (I2C_IsAcknowledgeFailed(hi2c) != HAL_OK)\n    {\n      return HAL_ERROR;\n    }\n\n    /* Check for the Timeout */\n    if (Timeout != HAL_MAX_DELAY)\n    {\n      if (((HAL_GetTick() - Tickstart) > Timeout) || (Timeout == 0U))\n      {\n        hi2c->PreviousState       = I2C_STATE_NONE;\n        hi2c->State               = HAL_I2C_STATE_READY;\n        hi2c->Mode                = HAL_I2C_MODE_NONE;\n        hi2c->ErrorCode           |= HAL_I2C_ERROR_TIMEOUT;\n\n        /* Process Unlocked */\n        __HAL_UNLOCK(hi2c);\n\n        return HAL_ERROR;\n      }\n    }\n  }\n  return HAL_OK;\n}", "docstring": "/**\n  * @brief  This function handles I2C Communication Timeout for specific usage of BTF flag.\n  * @param  hi2c Pointer to a I2C_HandleTypeDef structure that contains\n  *                the configuration information for the specified I2C.\n  * @param  Timeout Timeout duration\n  * @param  Tickstart Tick start value\n  * @retval HAL status\n  */", "url": "https://github.com/Excitablecell/GEARdrones/blob/489001958d76c251128a953a47555c68bfe65fd8/Firmware/GEARremote/Drivers/STM32F1xx_HAL_Driver/Src/stm32f1xx_hal_i2c.c#L7412-L7440", "sha": "489001958d76c251128a953a47555c68bfe65fd8"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "XIic_SetOptions", "code": "void XIic_SetOptions(XIic *InstancePtr, u32 NewOptions)\n{\n\tu32 CntlReg;\n\n\tXil_AssertVoid(InstancePtr != NULL);\n\n\tXIic_IntrGlobalDisable(InstancePtr->BaseAddress);\n\n\t/*\n\t * Update the options in the instance and get the contents of the\n\t * control register such that the general call option can be modified.\n\t */\n\tInstancePtr->Options = NewOptions;\n\tCntlReg = XIic_ReadReg(InstancePtr->BaseAddress, XIIC_CR_REG_OFFSET);\n\n\t/*\n\t * The general call option is the only option that maps directly to\n\t * a hardware register feature.\n\t */\n\tif (NewOptions & XII_GENERAL_CALL_OPTION) {\n\t\tCntlReg |= XIIC_CR_GENERAL_CALL_MASK;\n\t} else {\n\t\tCntlReg &= ~XIIC_CR_GENERAL_CALL_MASK;\n\t}\n\n\t/*\n\t * Write the new control register value to the register.\n\t */\n\tXIic_WriteReg(InstancePtr->BaseAddress, XIIC_CR_REG_OFFSET, CntlReg);\n\n\tXIic_IntrGlobalEnable(InstancePtr->BaseAddress);\n}", "docstring": "/************************** Constant Definitions ***************************/\n/**************************** Type Definitions *****************************/\n/***************** Macros (Inline Functions) Definitions *******************/\n/************************** Function Prototypes ****************************/\n/************************** Variable Definitions **************************/\n/*****************************************************************************/\n/**\n*\n* This function sets the options for the IIC device driver. The options control\n* how the device behaves relative to the IIC bus. If an option applies to\n* how messages are sent or received on the IIC bus, it must be set prior to\n* calling functions which send or receive data.\n*\n* To set multiple options, the values must be ORed together. To not change\n* existing options, read/modify/write with the current options using\n* XIic_GetOptions().\n*\n* USAGE EXAMPLE:\n*\n* Read/modify/write to enable repeated start:\n* 
\n*   u8 Options;\n*   Options = XIic_GetOptions(&Iic);\n*   XIic_SetOptions(&Iic, Options | XII_REPEATED_START_OPTION);\n* 
\n*\n* Disabling General Call:\n*
\n*   Options = XIic_GetOptions(&Iic);\n*   XIic_SetOptions(&Iic, Options &= ~XII_GENERAL_CALL_OPTION);\n* 
\n*\n* @param\tInstancePtr is a pointer to the XIic instance to be worked on.\n* @param\tNewOptions are the options to be set. See xiic.h for a list of\n*\t\tthe available options.\n*\n* @return\tNone.\n*\n* @note\n*\n* Sending or receiving messages with repeated start enabled, and then\n* disabling repeated start, will not take effect until another master\n* transaction is completed. i.e. After using repeated start, the bus will\n* continue to be throttled after repeated start is disabled until a master\n* transaction occurs allowing the IIC to release the bus.\n*

\n* Options enabled will have a 1 in its appropriate bit position.\n*\n****************************************************************************/", "url": "https://github.com/suisuisi/FPGA_Library/blob/1e33525198872d63ced48e8f0cebaa2419b9eb22/ThreePart/digilent_ip/ip/Pmods/PmodHYGRO_v1_0/drivers/PmodHYGRO_v1_0/src/xiic_options.c#L116-L147", "sha": "1e33525198872d63ced48e8f0cebaa2419b9eb22"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NAV_ReadRegister", "code": "void NAV_ReadRegister(PmodNAV *InstancePtr, uint8_t bInst, uint8_t bAddr,\n uint8_t bCntBytes, uint8_t *pData) {\n u8 recv[bCntBytes + 1];\n int i = 0;\n\n for (i = 0; i < bCntBytes; i++) {\n recv[i + 1] = 0;\n }\n\n // Add write bit\n if (bInst == NAV_INST_AG)\n recv[0] = bAddr | 0x80;\n else\n recv[0] = bAddr | 0xC0;\n\n // Select a slave\n XSpi_SetSlaveSelect(&InstancePtr->NAVSpi, bInst);\n\n XSpi_Transfer(&InstancePtr->NAVSpi, recv, recv, bCntBytes + 1);\n\n // Set chip select high\n XSpi_SetSlaveSelect(&InstancePtr->NAVSpi, 0b000);\n\n for (i = 0; i < bCntBytes; i++) {\n pData[i] = recv[i + 1];\n }\n}", "docstring": "/* ------------------------------------------------------------ */\n/*** void NAV_ReadRegister(PmodNAV *InstancePtr, uint8_t bInst, uint8_t bAddr, uint8_t bCntBytes, uint8_t *pData)\n**\n** Parameters:\n** InstancePtr - instance of PmodNAV\n** bInst - instrument Chip Select to be used: Accelerometer/Gyro,\n** Magnetometer or Altimeter\n** bAddr - register address to start reading bytes from\n** bCntBytes - number of bytes to be read\n** pData - pointer to the 16 bit data array to be read\n**\n** Return Value:\n** None\n**\n** Errors:\n** None\n**\n** Description:\n** Reads bCntBytes bytes from device via SPI, from register having\n** consecutive addresses, starting with bAddr.\n*/", "url": "https://github.com/suisuisi/FPGA_Library/blob/1e33525198872d63ced48e8f0cebaa2419b9eb22/ThreePart/digilent_ip/ip/Pmods/PmodNAV_v1_0/drivers/PmodNAV_v1_0/src/PmodNAV.c#L565-L591", "sha": "1e33525198872d63ced48e8f0cebaa2419b9eb22"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NAV_SetIntThresholdALT", "code": "void NAV_SetIntThresholdALT(PmodNAV *InstancePtr, float thVal) {\n u8 buffer[2];\n u16 bthVal;\n // Converts the float value in gauss to raw magnetic field data to be written\n // in NAV_MAG_INT_THS_L/NAV_MAG_INT_THS_H registers\n bthVal = (u16) (thVal * 4096);\n // Split bytes\n // Make sure the first bit of the High byte is 0, for correct functionality\n // of the device\n buffer[0] = (bthVal & 0xFF00) >> 8;\n buffer[1] = (bthVal & 0x00FF);\n NAV_WriteSPI(InstancePtr, NAV_INST_ALT, NAV_ALT_THS_P_H, buffer[0]);\n NAV_WriteSPI(InstancePtr, NAV_INST_ALT, NAV_ALT_THS_P_L, buffer[1]);\n}", "docstring": "/* ------------------------------------------------------------ */\n/*** void NAV_SetIntThresholdALT(PmodNAV *InstancePtr, float thVal)\n**\n** Parameters:\n** InstancePtr - instance of PmodNAV\n** thVal - the interrupt threshold parameter for alt instrument\n**\n** Return Value:\n** None\n**\n** Errors:\n** None\n**\n** Description:\n** The function sets the interrupt threshold for the altimeter instrument\n*/", "url": "https://github.com/suisuisi/FPGA_Library/blob/1e33525198872d63ced48e8f0cebaa2419b9eb22/ThreePart/digilent_ip/ip/Pmods/PmodNAV_v1_0/drivers/PmodNAV_v1_0/src/PmodNAV.c#L2100-L2113", "sha": "1e33525198872d63ced48e8f0cebaa2419b9eb22"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DynRecvMasterData", 
"code": "static void DynRecvMasterData(XIic *InstancePtr)\n{\n\tu8 LoopCnt;\n\tu8 BytesInFifo;\n\tu8 BytesToRead;\n\tu32 CntlReg;\n\n\t/*\n\t * Device is a master receiving, get the contents of the control\n\t * register and determine the number of bytes in fifo to be read out.\n\t */\n\tCntlReg = XIic_ReadReg(InstancePtr->BaseAddress, XIIC_CR_REG_OFFSET);\n\tBytesInFifo = (u8) XIic_ReadReg(InstancePtr->BaseAddress,\n\t\t\t\t\t XIIC_RFO_REG_OFFSET) + 1;\n\n\t/*\n\t * If data in FIFO holds all data to be retrieved - 1, set NOACK and\n\t * disable the Tx error.\n\t */\n\tif ((InstancePtr->RecvByteCount - BytesInFifo) == 1) {\n\t\t/*\n\t\t * Disable Tx error interrupt to prevent interrupt as this\n\t\t * device will cause it when it set NO ACK next.\n\t\t */\n\t\tXIic_DisableIntr(InstancePtr->BaseAddress,\n\t\t\t\t XIIC_INTR_TX_ERROR_MASK);\n\t\tXIic_ClearIntr(InstancePtr->BaseAddress,\n\t\t\t\tXIIC_INTR_TX_ERROR_MASK);\n\n\t\t/*\n\t\t * Read one byte to clear a place for the last byte to be read\n\t\t * which will set the NO ACK.\n\t\t */\n\t\tXIic_ReadRecvByte(InstancePtr);\n\t}\n\n\t/*\n\t * If data in FIFO is all the data to be received then get the data and\n\t * also leave the device in a good state for the next transaction.\n\t */\n\telse if ((InstancePtr->RecvByteCount - BytesInFifo) == 0) {\n\t\tif (InstancePtr->Options & XII_REPEATED_START_OPTION) {\n\t\t\tCntlReg |= XIIC_CR_REPEATED_START_MASK;\n\t\t\tXIic_WriteReg(InstancePtr->BaseAddress,\n\t\t\t\t\tXIIC_CR_REG_OFFSET,\n\t\t\t\t\tCntlReg);\n\t\t}\n\n\t\t/*\n\t\t * Read data from the FIFO then set zero based FIFO read depth\n\t\t * for a byte.\n\t\t */\n\t\tfor (LoopCnt = 0; LoopCnt < BytesInFifo; LoopCnt++) {\n\t\t\tXIic_ReadRecvByte(InstancePtr);\n\t\t}\n\n\t\tXIic_WriteReg(InstancePtr->BaseAddress,\n\t\t\t\tXIIC_RFD_REG_OFFSET, 0);\n\n\t\t/*\n\t\t * Disable Rx full interrupt and write the control reg with ACK\n\t\t * allowing next byte sent to be acknowledged automatically.\n\t\t */\n\t\tXIic_DisableIntr(InstancePtr->BaseAddress,\n\t\t\t\t XIIC_INTR_RX_FULL_MASK);\n\n\t\t/*\n\t\t * Send notification of msg Rx complete in RecvHandler callback.\n\t\t */\n\t\tInstancePtr->RecvHandler(InstancePtr->RecvCallBackRef, 0);\n\t}\n\telse {\n\t\t/*\n\t\t * Fifo data not at n-1, read all but the last byte of data\n\t\t * from the slave, if more than a FIFO full yet to receive\n\t\t * read a FIFO full.\n\t\t */\n\t\tBytesToRead = InstancePtr->RecvByteCount - BytesInFifo - 1;\n\t\tif (BytesToRead > IIC_RX_FIFO_DEPTH) {\n\t\t\tBytesToRead = IIC_RX_FIFO_DEPTH;\n\t\t}\n\n\t\t/*\n\t\t * Read in data from the FIFO.\n\t\t */\n\t\tfor (LoopCnt = 0; LoopCnt < BytesToRead; LoopCnt++) {\n\t\t\tXIic_ReadRecvByte(InstancePtr);\n\t\t}\n\t}\n}", "docstring": "/*****************************************************************************/\n/**\n*\n* This function is called when the receive register is full. The number\n* of bytes received to cause the interrupt is adjustable using the Receive FIFO\n* Depth register. The number of bytes in the register is read in the Receive\n* FIFO occupancy register. 
Both these registers are zero based values (0-15)\n* such that a value of zero indicates 1 byte.\n*\n* For a Master Receiver to properly signal the end of a message, the data must\n* be read in up to the message length - 1, where control register bits will be\n* set for bus controls to occur on reading of the last byte.\n*\n* @param\tInstancePtr is a pointer to the XIic instance to be worked on.\n*\n* @return\tNone.\n*\n* @note\t\tNone.\n*\n******************************************************************************/", "url": "https://github.com/suisuisi/FPGA_Library/blob/1e33525198872d63ced48e8f0cebaa2419b9eb22/ThreePart/digilent_ip/ip/Pmods/PmodRTCC_v1_0/drivers/PmodRTCC_v1_0/src/xiic_dyn_master.c#L466-L555", "sha": "1e33525198872d63ced48e8f0cebaa2419b9eb22"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DynSendMasterData", "code": "static void DynSendMasterData(XIic *InstancePtr)\n{\n\tu32 CntlReg;\n\n\t/*\n\t * In between 1st and last byte of message, fill the FIFO with more data\n\t * to send, disable the 1/2 empty interrupt based upon data left to\n\t * send.\n\t */\n\tif (InstancePtr->SendByteCount > 1) {\n\t\tXIic_TransmitFifoFill(InstancePtr, XIIC_MASTER_ROLE);\n\n\t\tif (InstancePtr->SendByteCount < 2) {\n\t\t\tXIic_DisableIntr(InstancePtr->BaseAddress,\n\t\t\t\t\t XIIC_INTR_TX_HALF_MASK);\n\t\t}\n\t}\n\n\t/*\n\t * If there is only one byte left to send, processing differs between\n\t * repeated start and normal messages.\n\t */\n\telse if (InstancePtr->SendByteCount == 1) {\n\t\t/*\n\t\t * When using repeated start, another interrupt is expected\n\t\t * after the last byte has been sent, so the message is not\n\t\t * done yet.\n\t\t */\n\t\tif (InstancePtr->Options & XII_REPEATED_START_OPTION) {\n\t\t\tXIic_WriteSendByte(InstancePtr);\n\t\t} else {\n\t\t\tXIic_DynSendStop(InstancePtr->BaseAddress,\n\t\t\t\t\t *InstancePtr->SendBufferPtr);\n\n\t\t\t/*\n\t\t\t * Wait for bus to not be busy before declaring message\n\t\t\t * has been sent for the no repeated start operation.\n\t\t\t * The callback will be called from the BusNotBusy part\n\t\t\t * of the Interrupt handler to ensure that the message\n\t\t\t * is completely sent. Disable the Tx interrupts and\n\t\t\t * enable the BNB interrupt.\n\t\t\t */\n\t\t\tInstancePtr->BNBOnly = FALSE;\n\t\t\tXIic_DisableIntr(InstancePtr->BaseAddress,\n\t\t\t\t\t XIIC_TX_INTERRUPTS);\n\t\t\tXIic_EnableIntr(InstancePtr->BaseAddress,\n\t\t\t\t\t XIIC_INTR_BNB_MASK);\n\t\t}\n\t} else {\n\t\tif (InstancePtr->Options & XII_REPEATED_START_OPTION) {\n\t\t\t/*\n\t\t\t * The message being sent has completed. 
When using\n\t\t\t * repeated start with no more bytes to send repeated\n\t\t\t * start needs to be set in the control register so\n\t\t\t * that the bus will still be held by this master.\n\t\t\t */\n\t\t\tCntlReg = XIic_ReadReg(InstancePtr->BaseAddress,\n\t\t\t\t\tXIIC_CR_REG_OFFSET);\n\t\t\tCntlReg |= XIIC_CR_REPEATED_START_MASK;\n\t\t\tXIic_WriteReg(InstancePtr->BaseAddress,\n\t\t\t\t\tXIIC_CR_REG_OFFSET, CntlReg);\n\n\t\t\t/*\n\t\t\t * If the message that was being sent has finished,\n\t\t\t * disable all transmit interrupts and call the callback\n\t\t\t * that was setup to indicate the message was sent,\n\t\t\t * with 0 bytes remaining.\n\t\t\t */\n\t\t\tXIic_DisableIntr(InstancePtr->BaseAddress,\n\t\t\t\t\t XIIC_TX_INTERRUPTS);\n\t\t\tInstancePtr->SendHandler(InstancePtr->SendCallBackRef,\n\t\t\t\t\t\t 0);\n\t\t}\n\t}\n\n\treturn;\n}", "docstring": "/******************************************************************************\n*\n* When the IIC Tx FIFO/register goes empty, this routine is called by the\n* interrupt service routine to fill the transmit FIFO with data to be sent.\n*\n* This function also is called by the Tx � empty interrupt as the data handling\n* is identical when you don't assume the FIFO is empty but use the Tx_FIFO_OCY\n* register to indicate the available free FIFO bytes.\n*\n* @param\tInstancePtr is a pointer to the XIic instance to be worked on.\n*\n* @return\tNone.\n*\n* @note\t\tNone.\n*\n******************************************************************************/", "url": "https://github.com/suisuisi/FPGA_Library/blob/1e33525198872d63ced48e8f0cebaa2419b9eb22/ThreePart/digilent_ip/ip/Pmods/PmodTMP3_v1_0/drivers/PmodTMP3_v1_0/src/xiic_dyn_master.c#L218-L294", "sha": "1e33525198872d63ced48e8f0cebaa2419b9eb22"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WF_SecurityWpaSet", "code": "void WF_SecurityWpaSet(t_wpaContext* p_context)\n{\n#if defined(WF_ERROR_CHECKING)\n uint32_t errorCode;\n\n errorCode = UdSetSecurityWpa(p_context);\n if (errorCode != UD_SUCCESS)\n {\n EventEnqueue(WF_EVENT_ERROR, errorCode);\n return;\n }\n#endif /* WF_ERROR_CHECKING */\n\n WF_SetSecurity(p_context->wpaSecurityType,\n 0, // not used\n p_context->keyInfo.key,\n p_context->keyInfo.keyLength);\n}", "docstring": "//============================================================================", "url": "https://github.com/suisuisi/FPGA_Library/blob/1e33525198872d63ced48e8f0cebaa2419b9eb22/ThreePart/digilent_ip/ip/Pmods/PmodWIFI_v1_0/drivers/PmodWIFI_v1_0/src/MRF24G/utility/wf_connection_profile.c#L367-L384", "sha": "1e33525198872d63ced48e8f0cebaa2419b9eb22"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_DMAEx_ConfigMuxRequestGenerator", "code": "HAL_StatusTypeDef HAL_DMAEx_ConfigMuxRequestGenerator(DMA_HandleTypeDef *hdma,\n HAL_DMA_MuxRequestGeneratorConfigTypeDef *pRequestGeneratorConfig)\n{\n /* Check the parameters */\n assert_param(IS_DMA_ALL_INSTANCE(hdma->Instance));\n\n assert_param(IS_DMAMUX_REQUEST_GEN_SIGNAL_ID(pRequestGeneratorConfig->SignalID));\n\n assert_param(IS_DMAMUX_REQUEST_GEN_POLARITY(pRequestGeneratorConfig->Polarity));\n assert_param(IS_DMAMUX_REQUEST_GEN_REQUEST_NUMBER(pRequestGeneratorConfig->RequestNumber));\n\n /* check if the DMA state is ready\n and DMA is using a DMAMUX request generator block\n */\n if ((hdma->State == HAL_DMA_STATE_READY) && (hdma->DMAmuxRequestGen != 0U))\n {\n /* Process Locked */\n __HAL_LOCK(hdma);\n\n /* Set the request generator new parameters 
*/\n hdma->DMAmuxRequestGen->RGCR = pRequestGeneratorConfig->SignalID | \\\n ((pRequestGeneratorConfig->RequestNumber - 1U) << (POSITION_VAL(DMAMUX_RGxCR_GNBREQ) & 0x1FU)) | \\\n pRequestGeneratorConfig->Polarity;\n /* Process UnLocked */\n __HAL_UNLOCK(hdma);\n\n return HAL_OK;\n }\n else\n {\n return HAL_ERROR;\n }\n}", "docstring": "/**\n * @brief Configure the DMAMUX request generator block used by the given DMA channel (instance).\n * @param hdma: pointer to a DMA_HandleTypeDef structure that contains\n * the configuration information for the specified DMA channel.\n * @param pRequestGeneratorConfig : pointer to HAL_DMA_MuxRequestGeneratorConfigTypeDef :\n * contains the request generator parameters.\n *\n * @retval HAL status\n */", "url": "https://github.com/klonyyy/MCUViewer/blob/3650ed7feaff963ca76abcb72423ee220cdbfa69/example/MCUViewer_test/Drivers/STM32G4xx_HAL_Driver/Src/stm32g4xx_hal_dma_ex.c#L141-L173", "sha": "3650ed7feaff963ca76abcb72423ee220cdbfa69"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_EXTI_GetConfigLine", "code": "HAL_StatusTypeDef HAL_EXTI_GetConfigLine(EXTI_HandleTypeDef *hexti, EXTI_ConfigTypeDef *pExtiConfig)\n{\n __IO uint32_t *regaddr;\n uint32_t regval;\n uint32_t linepos;\n uint32_t maskline;\n uint32_t offset;\n\n /* Check null pointer */\n if ((hexti == NULL) || (pExtiConfig == NULL))\n {\n return HAL_ERROR;\n }\n\n /* Check the parameter */\n assert_param(IS_EXTI_LINE(hexti->Line));\n\n /* Store handle line number to configuration structure */\n pExtiConfig->Line = hexti->Line;\n\n /* Compute line register offset and line mask */\n offset = ((pExtiConfig->Line & EXTI_REG_MASK) >> EXTI_REG_SHIFT);\n /* Compute line position */\n linepos = (pExtiConfig->Line & EXTI_PIN_MASK);\n /* Compute mask */\n maskline = (1uL << linepos);\n\n /* 1] Get core mode : interrupt */\n regaddr = (&EXTI->IMR1 + (EXTI_MODE_OFFSET * offset));\n regval = *regaddr;\n\n /* Check if selected line is enable */\n if ((regval & maskline) != 0x00u)\n {\n pExtiConfig->Mode = EXTI_MODE_INTERRUPT;\n }\n else\n {\n pExtiConfig->Mode = EXTI_MODE_NONE;\n }\n\n /* Get event mode */\n regaddr = (&EXTI->EMR1 + (EXTI_MODE_OFFSET * offset));\n regval = *regaddr;\n\n /* Check if selected line is enable */\n if ((regval & maskline) != 0x00u)\n {\n pExtiConfig->Mode |= EXTI_MODE_EVENT;\n }\n\n /* Get default Trigger and GPIOSel configuration */\n pExtiConfig->Trigger = EXTI_TRIGGER_NONE;\n pExtiConfig->GPIOSel = 0x00u;\n\n /* 2] Get trigger for configurable lines : rising */\n if ((pExtiConfig->Line & EXTI_CONFIG) != 0x00u)\n {\n regaddr = (&EXTI->RTSR1 + (EXTI_CONFIG_OFFSET * offset));\n regval = *regaddr;\n\n /* Check if configuration of selected line is enable */\n if ((regval & maskline) != 0x00u)\n {\n pExtiConfig->Trigger = EXTI_TRIGGER_RISING;\n }\n\n /* Get falling configuration */\n regaddr = (&EXTI->FTSR1 + (EXTI_CONFIG_OFFSET * offset));\n regval = *regaddr;\n\n /* Check if configuration of selected line is enable */\n if ((regval & maskline) != 0x00u)\n {\n pExtiConfig->Trigger |= EXTI_TRIGGER_FALLING;\n }\n\n /* Get Gpio port selection for gpio lines */\n if ((pExtiConfig->Line & EXTI_GPIO) == EXTI_GPIO)\n {\n assert_param(IS_EXTI_GPIO_PIN(linepos));\n\n regval = SYSCFG->EXTICR[linepos >> 2u];\n pExtiConfig->GPIOSel = (regval >> (SYSCFG_EXTICR1_EXTI1_Pos * (linepos & 0x03u))) & SYSCFG_EXTICR1_EXTI0;\n }\n }\n\n return HAL_OK;\n}", "docstring": "/**\n * @brief Get configuration of a dedicated Exti line.\n * @param hexti Exti handle.\n * @param 
pExtiConfig Pointer on structure to store Exti configuration.\n * @retval HAL Status.\n */", "url": "https://github.com/klonyyy/MCUViewer/blob/3650ed7feaff963ca76abcb72423ee220cdbfa69/example/MCUViewer_test/Drivers/STM32G4xx_HAL_Driver/Src/stm32g4xx_hal_exti.c#L268-L356", "sha": "3650ed7feaff963ca76abcb72423ee220cdbfa69"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FLASH_OB_BootLockConfig", "code": "static HAL_StatusTypeDef FLASH_OB_BootLockConfig(uint32_t BootLockConfig)\n{\n HAL_StatusTypeDef status;\n\n /* Check the parameters */\n assert_param(IS_OB_BOOT_LOCK(BootLockConfig));\n\n /* Wait for last operation to be completed */\n status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE);\n\n if (status == HAL_OK)\n {\n MODIFY_REG(FLASH->SEC1R, FLASH_SEC1R_BOOT_LOCK, BootLockConfig);\n\n /* Set OPTSTRT Bit */\n SET_BIT(FLASH->CR, FLASH_CR_OPTSTRT);\n\n /* Wait for last operation to be completed */\n status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE);\n }\n\n return status;\n}", "docstring": "/**\n * @brief Configure the Boot Lock into Option Bytes.\n * @note To configure any option bytes, the option lock bit OPTLOCK must be\n * cleared with the call of HAL_FLASH_OB_Unlock() function.\n * @note New option bytes configuration will be taken into account in two cases:\n * - after an option bytes launch through the call of HAL_FLASH_OB_Launch()\n * - after a power reset (BOR reset or exit from Standby/Shutdown modes)\n * @param BootLockConfig specifies the boot lock configuration.\n * This parameter can be one of the following values:\n * @arg OB_BOOT_LOCK_ENABLE: Enable Boot Lock\n * @arg OB_BOOT_LOCK_DISABLE: Disable Boot Lock\n *\n * @retval HAL_Status\n */", "url": "https://github.com/klonyyy/MCUViewer/blob/3650ed7feaff963ca76abcb72423ee220cdbfa69/example/MCUViewer_test/Drivers/STM32G4xx_HAL_Driver/Src/stm32g4xx_hal_flash_ex.c#L1175-L1197", "sha": "3650ed7feaff963ca76abcb72423ee220cdbfa69"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_TIM_PWM_Start_IT", "code": "HAL_StatusTypeDef HAL_TIM_PWM_Start_IT(TIM_HandleTypeDef *htim, uint32_t Channel)\n{\n HAL_StatusTypeDef status = HAL_OK;\n uint32_t tmpsmcr;\n\n /* Check the parameters */\n assert_param(IS_TIM_CCX_CHANNEL(htim->Instance, Channel));\n\n /* Check the TIM channel state */\n if (TIM_CHANNEL_STATE_GET(htim, Channel) != HAL_TIM_CHANNEL_STATE_READY)\n {\n return HAL_ERROR;\n }\n\n /* Set the TIM channel state */\n TIM_CHANNEL_STATE_SET(htim, Channel, HAL_TIM_CHANNEL_STATE_BUSY);\n\n switch (Channel)\n {\n case TIM_CHANNEL_1:\n {\n /* Enable the TIM Capture/Compare 1 interrupt */\n __HAL_TIM_ENABLE_IT(htim, TIM_IT_CC1);\n break;\n }\n\n case TIM_CHANNEL_2:\n {\n /* Enable the TIM Capture/Compare 2 interrupt */\n __HAL_TIM_ENABLE_IT(htim, TIM_IT_CC2);\n break;\n }\n\n case TIM_CHANNEL_3:\n {\n /* Enable the TIM Capture/Compare 3 interrupt */\n __HAL_TIM_ENABLE_IT(htim, TIM_IT_CC3);\n break;\n }\n\n case TIM_CHANNEL_4:\n {\n /* Enable the TIM Capture/Compare 4 interrupt */\n __HAL_TIM_ENABLE_IT(htim, TIM_IT_CC4);\n break;\n }\n\n default:\n status = HAL_ERROR;\n break;\n }\n\n if (status == HAL_OK)\n {\n /* Enable the Capture compare channel */\n TIM_CCxChannelCmd(htim->Instance, Channel, TIM_CCx_ENABLE);\n\n if (IS_TIM_BREAK_INSTANCE(htim->Instance) != RESET)\n {\n /* Enable the main output */\n __HAL_TIM_MOE_ENABLE(htim);\n }\n\n /* Enable the Peripheral, except in trigger mode where enable is automatically done with trigger */\n 
if (IS_TIM_SLAVE_INSTANCE(htim->Instance))\n {\n tmpsmcr = htim->Instance->SMCR & TIM_SMCR_SMS;\n if (!IS_TIM_SLAVEMODE_TRIGGER_ENABLED(tmpsmcr))\n {\n __HAL_TIM_ENABLE(htim);\n }\n }\n else\n {\n __HAL_TIM_ENABLE(htim);\n }\n }\n\n /* Return function status */\n return status;\n}", "docstring": "/**\n * @brief Starts the PWM signal generation in interrupt mode.\n * @param htim TIM PWM handle\n * @param Channel TIM Channel to be enabled\n * This parameter can be one of the following values:\n * @arg TIM_CHANNEL_1: TIM Channel 1 selected\n * @arg TIM_CHANNEL_2: TIM Channel 2 selected\n * @arg TIM_CHANNEL_3: TIM Channel 3 selected\n * @arg TIM_CHANNEL_4: TIM Channel 4 selected\n * @retval HAL status\n */", "url": "https://github.com/klonyyy/MCUViewer/blob/3650ed7feaff963ca76abcb72423ee220cdbfa69/example/MCUViewer_test/Drivers/STM32G4xx_HAL_Driver/Src/stm32g4xx_hal_tim.c#L1566-L1646", "sha": "3650ed7feaff963ca76abcb72423ee220cdbfa69"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_TIM_IC_Stop_DMA", "code": "HAL_StatusTypeDef HAL_TIM_IC_Stop_DMA(TIM_HandleTypeDef *htim, uint32_t Channel)\n{\n HAL_StatusTypeDef status = HAL_OK;\n\n /* Check the parameters */\n assert_param(IS_TIM_CCX_CHANNEL(htim->Instance, Channel));\n assert_param(IS_TIM_DMA_CC_INSTANCE(htim->Instance));\n\n /* Disable the Input Capture channel */\n TIM_CCxChannelCmd(htim->Instance, Channel, TIM_CCx_DISABLE);\n\n switch (Channel)\n {\n case TIM_CHANNEL_1:\n {\n /* Disable the TIM Capture/Compare 1 DMA request */\n __HAL_TIM_DISABLE_DMA(htim, TIM_DMA_CC1);\n (void)HAL_DMA_Abort_IT(htim->hdma[TIM_DMA_ID_CC1]);\n break;\n }\n\n case TIM_CHANNEL_2:\n {\n /* Disable the TIM Capture/Compare 2 DMA request */\n __HAL_TIM_DISABLE_DMA(htim, TIM_DMA_CC2);\n (void)HAL_DMA_Abort_IT(htim->hdma[TIM_DMA_ID_CC2]);\n break;\n }\n\n case TIM_CHANNEL_3:\n {\n /* Disable the TIM Capture/Compare 3 DMA request */\n __HAL_TIM_DISABLE_DMA(htim, TIM_DMA_CC3);\n (void)HAL_DMA_Abort_IT(htim->hdma[TIM_DMA_ID_CC3]);\n break;\n }\n\n case TIM_CHANNEL_4:\n {\n /* Disable the TIM Capture/Compare 4 DMA request */\n __HAL_TIM_DISABLE_DMA(htim, TIM_DMA_CC4);\n (void)HAL_DMA_Abort_IT(htim->hdma[TIM_DMA_ID_CC4]);\n break;\n }\n\n default:\n status = HAL_ERROR;\n break;\n }\n\n if (status == HAL_OK)\n {\n /* Disable the Peripheral */\n __HAL_TIM_DISABLE(htim);\n\n /* Set the TIM channel state */\n TIM_CHANNEL_STATE_SET(htim, Channel, HAL_TIM_CHANNEL_STATE_READY);\n TIM_CHANNEL_N_STATE_SET(htim, Channel, HAL_TIM_CHANNEL_STATE_READY);\n }\n\n /* Return function status */\n return status;\n}", "docstring": "/**\n * @brief Stops the TIM Input Capture measurement in DMA mode.\n * @param htim TIM Input Capture handle\n * @param Channel TIM Channels to be disabled\n * This parameter can be one of the following values:\n * @arg TIM_CHANNEL_1: TIM Channel 1 selected\n * @arg TIM_CHANNEL_2: TIM Channel 2 selected\n * @arg TIM_CHANNEL_3: TIM Channel 3 selected\n * @arg TIM_CHANNEL_4: TIM Channel 4 selected\n * @retval HAL status\n */", "url": "https://github.com/klonyyy/MCUViewer/blob/3650ed7feaff963ca76abcb72423ee220cdbfa69/example/MCUViewer_test/Drivers/STM32G4xx_HAL_Driver/Src/stm32g4xx_hal_tim.c#L2546-L2608", "sha": "3650ed7feaff963ca76abcb72423ee220cdbfa69"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "cairo_xlib_surface_set_size", "code": "void\ncairo_xlib_surface_set_size (cairo_surface_t *abstract_surface,\n\t\t\t int width,\n\t\t\t int height)\n{\n cairo_xlib_surface_t *surface = 
(cairo_xlib_surface_t *) abstract_surface;\n cairo_status_t status;\n\n if (unlikely (abstract_surface->status))\n\treturn;\n if (unlikely (abstract_surface->finished)) {\n\t_cairo_surface_set_error (abstract_surface,\n\t\t\t\t _cairo_error (CAIRO_STATUS_SURFACE_FINISHED));\n\treturn;\n }\n\n if (! _cairo_surface_is_xlib (abstract_surface)) {\n\t_cairo_surface_set_error (abstract_surface,\n\t\t\t\t _cairo_error (CAIRO_STATUS_SURFACE_TYPE_MISMATCH));\n\treturn;\n }\n\n if (surface->width == width && surface->height == height)\n\treturn;\n\n if (! valid_size (width, height)) {\n\t_cairo_surface_set_error (abstract_surface,\n\t\t\t\t _cairo_error (CAIRO_STATUS_INVALID_SIZE));\n\treturn;\n }\n\n status = _cairo_surface_flush (abstract_surface, 0);\n if (unlikely (status)) {\n\t_cairo_surface_set_error (abstract_surface, status);\n\treturn;\n }\n\n _cairo_xlib_surface_discard_shm (surface);\n\n surface->width = width;\n surface->height = height;\n}", "docstring": "/**\n * cairo_xlib_surface_set_size:\n * @surface: a #cairo_surface_t for the XLib backend\n * @width: the new width of the surface\n * @height: the new height of the surface\n *\n * Informs cairo of the new size of the X Drawable underlying the\n * surface. For a surface created for a Window (rather than a Pixmap),\n * this function must be called each time the size of the window\n * changes. (For a subwindow, you are normally resizing the window\n * yourself, but for a toplevel window, it is necessary to listen for\n * ConfigureNotify events.)\n *\n * A Pixmap can never change size, so it is never necessary to call\n * this function on a surface created for a Pixmap.\n *\n * Since: 1.0\n **/", "url": "https://github.com/romgrk/kui.nvim/blob/b3b2f53d6678dce86acc91043b32eab6059ce0cf/lua/kui/cairo/csrc/cairo/src/cairo-xlib-surface.c#L2090-L2131", "sha": "b3b2f53d6678dce86acc91043b32eab6059ce0cf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "qcvm_set_parm_int", "code": "void qcvm_set_parm_int(qcvm_t *qcvm, int parm, int val)\n{\n\tGET_INT(OFS_PARM0 + (parm * 3)) = val;\n}", "docstring": "/* set integer parameter */", "url": "https://github.com/erysdren/QCVM/blob/acd27d22c040f0636df61b4c8fb92a81e455262a/qcvm/qcvm_parameters.c#L68-L71", "sha": "acd27d22c040f0636df61b4c8fb92a81e455262a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "terminate_jbc", "code": "void terminate_jbc()\n{\n if (!is_jailbroken())\n return;\n\n // Restores original creds\n jbc_set_cred(&g_Cred);\n}", "docstring": "// Unload libjbc libraries", "url": "https://github.com/cy33hc/ps4-ezremote-client/blob/b7fe46cb94310f8591275d3c34107b5944137a4a/source/orbis_jbc.c#L115-L122", "sha": "b7fe46cb94310f8591275d3c34107b5944137a4a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LZ4HC_Insert", "code": "FORCE_INLINE void LZ4HC_Insert (LZ4HC_Data_Structure* hc4, const BYTE* ip)\n{\n U16* chainTable = hc4->chainTable;\n U32* HashTable = hc4->hashTable;\n const BYTE* const base = hc4->base;\n const U32 target = (U32)(ip - base);\n U32 idx = hc4->nextToUpdate;\n\n while(idx < target)\n {\n U32 h = LZ4HC_hashPtr(base+idx);\n size_t delta = idx - HashTable[h];\n if (delta>MAX_DISTANCE) delta = MAX_DISTANCE;\n DELTANEXTU16(idx) = (U16)delta;\n HashTable[h] = idx;\n idx++;\n }\n\n hc4->nextToUpdate = target;\n}", "docstring": "/* Update chains up to ip (excluded) */", "url": 
"https://github.com/zhangganlin/GlobalSfMpy/blob/ac6a0564d84e1d6e9a4077195e384d379aa20492/thirdparty/TheiaSfM/libraries/flann/src/cpp/flann/ext/lz4hc.c#L130-L149", "sha": "ac6a0564d84e1d6e9a4077195e384d379aa20492"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "calcrate", "code": "static speed_t\ncalcrate(int baudrate)\n{\n#ifdef B50\n\tif (baudrate == 50)\n\t\treturn B50;\n#endif\n#ifdef B75\n\tif (baudrate == 75)\n\t\treturn B75;\n#endif\n#ifdef B110\n\tif (baudrate == 110)\n\t\treturn B110;\n#endif\n#ifdef B134\n\tif (baudrate == 134)\n\t\treturn B134;\n#endif\n#ifdef B150\n\tif (baudrate == 150)\n\t\treturn B150;\n#endif\n#ifdef B200\n\tif (baudrate == 200)\n\t\treturn B200;\n#endif\n#ifdef B300\n\tif (baudrate == 300)\n\t\treturn B300;\n#endif\n#ifdef B600\n\tif (baudrate == 600)\n\t\treturn B600;\n#endif\n#ifdef B1200\n\tif (baudrate == 1200)\n\t\treturn B1200;\n#endif\n#ifdef B1800\n\tif (baudrate == 1800)\n\t\treturn B1800;\n#endif\n#ifdef B2400\n\tif (baudrate == 2400)\n\t\treturn B2400;\n#endif\n#ifdef B4800\n\tif (baudrate == 4800)\n\t\treturn B4800;\n#endif\n#ifdef B9600\n\tif (baudrate == 9600)\n\t\treturn B9600;\n#endif\n#ifdef B19200\n\telse if (baudrate == 19200)\n\t\treturn B19200;\n#endif\n#ifdef B38400\n\telse if (baudrate == 38400)\n\t\treturn B38400;\n#endif\n#ifdef B57600\n\telse if (baudrate == 57600)\n\t\treturn B57600;\n#endif\n#ifdef B76800\n\telse if (baudrate == 76800)\n\t\treturn B76800;\n#endif\n#ifdef B115200\n\telse if (baudrate == 115200)\n\t\treturn B115200;\n#endif\n#ifdef B230400\n\telse if (baudrate == 230400)\n\t\treturn B230400;\n#endif\n#ifdef B460800\n\telse if (baudrate == 460800)\n\t\treturn B460800;\n#endif\n\n\tLOG((PI_DBG_DEV, PI_DBG_LVL_ERR,\n\t\t\"DEV Serial CHANGEBAUD Unable to set baud rate %d\\n\",\n\t\tbaudrate));\n\tabort();\t/* invalid baud rate */\n\treturn 0;\n}", "docstring": "/***********************************************************************\n *\n * Function: calcrate\n *\n * Summary: validates the selected baudrate\n *\n * Paramters:\tbuadrate\n *\n * Returns: POSIX defined baudrate constant or terminates the process\n *\t\tif the requested baudrate is not supported.\n *\n ***********************************************************************/", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/Hotsync/unixserial.c#L649-L738", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ListViewLoadTable", "code": "static void ListViewLoadTable (FormPtr frm)\n{\n\tUInt16\t\t\trow;\n\tUInt16\t\t\trecordNum;\n\tUInt16\t\t\tlineHeight;\n\tUInt16\t\t\tdataHeight;\n\tUInt16\t\t\ttableHeight;\n\tUInt16\t\t\tnumRows;\n\tUInt32\t\t\tuniqueID;\n\tFontID\t\t\tcurrFont;\n\tTablePtr \t\ttable;\n\tMemHandle\t\t\trecordH;\n\tRectangleType\tr;\n\n\n\ttable = GetObjectPtr (ListTable);\n\n\tTblGetBounds (table, &r);\n\ttableHeight = r.extent.y;\n\n\tcurrFont = FntSetFont (ListFont);\n\tlineHeight = FntLineHeight ();\n\tFntSetFont (currFont);\n\n\tdataHeight = 0;\n\n\trecordNum = TopVisibleRecord;\n\n\t// For each row in the table, store the record number in the table item\n\t// that will dispaly the record.\n\tnumRows = TblGetNumberOfRows (table);\n\tfor (row = 0; row < numRows; row++)\n\t{\n\t\t// Get the next record in the currunt category.\n\t\trecordH = DmQueryNextInCategory (MemoDB, &recordNum, CurrentCategory);\n\t\tif(row == 0)\n\t\t{\n\t\t\t// store the position of the first 
row so we can use TopRowPositionInCategory+row\n\t\t\t// when drawing\n\t\t\tTopRowPositionInCategory = recordH ? DmPositionInCategory(MemoDB, recordNum, CurrentCategory) : 0;\n\t\t}\n\n\t\t// If the record was found, store the record number in the table item,\n\t\t// otherwise set the table row unusable.\n\t\tif (recordH && (tableHeight >= dataHeight + lineHeight))\n\t\t{\n\t\t\tTblSetRowID (table, row, recordNum);\n\t\t\tTblSetItemStyle (table, row, 0, customTableItem);\n\t\t\tTblSetItemFont (table, row, 0, ListFont);\n\n\t\t\tTblSetRowHeight (table, row, lineHeight);\n\n\t\t\tDmRecordInfo (MemoDB, recordNum, NULL, &uniqueID, NULL);\n\t\t\tif ((TblGetRowData (table, row) != uniqueID) ||\n\t\t\t\t( ! TblRowUsable (table, row)))\n\t\t\t{\n\t\t\t\tTblSetRowUsable (table, row, true);\n\n\t\t\t\t// Store the unique id of the record in the row.\n\t\t\t\tTblSetRowData (table, row, uniqueID);\n\n\t\t\t\t// Mark the row invalid so that it will draw when we call the\n\t\t\t\t// draw routine.\n\t\t\t\tTblMarkRowInvalid (table, row);\n\t\t\t}\n\n\t\t\tif (row+1 < numRows) recordNum++;\n\n\t\t\tdataHeight += lineHeight;\n\t\t}\n\t\telse\n\t\t{\n\t\t\t// Set the row height - when scrolling winDown, the heights of the last rows of\n\t\t\t// the table are used to determine how far to scroll. As rows are deleted\n\t\t\t// from the top of the table, formerly unused rows scroll into view, and the\n\t\t\t// height is used before the next call to ListViewLoadTable (which would set\n\t\t\t// the height correctly).\n\t\t\tTblSetRowHeight (table, row, lineHeight);\n\n\t\t\tTblSetRowUsable (table, row, false);\n\t\t}\n\t}\n\n\n\t// Update the scroll arrows.\n\tListViewUpdateScrollers (frm);\n}", "docstring": "/***********************************************************************\n *\n * FUNCTION: ListViewLoadTable\n *\n * DESCRIPTION: This routine loads memo database records into\n * the list view form.\n *\n * PARAMETERS: recordNum index of the first record to display.\n *\n * RETURNED: nothing\n *\n * REVISION HISTORY:\n *\t\t\tName\tDate\t\tDescription\n *\t\t\t----\t----\t\t-----------\n *\t\t\tart\t2/16/95\tInitial Revision\n *\t\t\tgrant\t1/29/99\tSet the heights of unused rows\n *\t\t\tryw\t1/11/01\tupdate global TopRowPositionInCategory on table load\n *\n ***********************************************************************/", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/MemoPad/MemoMain.c#L3046-L3132", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "duk_get_top_require_min", "code": "DUK_INTERNAL duk_idx_t duk_get_top_require_min(duk_hthread *thr, duk_idx_t min_top) {\n\tduk_idx_t ret;\n\n\tDUK_ASSERT_API_ENTRY(thr);\n\n\tret = (duk_idx_t) (thr->valstack_top - thr->valstack_bottom);\n\tif (DUK_UNLIKELY(ret < min_top)) {\n\t\tDUK_ERROR_TYPE_INVALID_ARGS(thr);\n\t\tDUK_WO_NORETURN(return 0;);\n\t}\n\treturn ret;\n}", "docstring": "/* Internal helper to get current top but to require a minimum top value\n * (TypeError if not met).\n */", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/duktape/duk_api_stack.c#L412-L423", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "duk_join", "code": "DUK_EXTERNAL void duk_join(duk_hthread *thr, duk_idx_t count) {\n\tDUK_ASSERT_API_ENTRY(thr);\n\n\tduk__concat_and_join_helper(thr, count, 1 
/*is_join*/);\n}", "docstring": "/* DUK_USE_PREFER_SIZE */", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/duktape/duk_api_string.c#L161-L165", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "duk_debug_write_uint", "code": "DUK_INTERNAL void duk_debug_write_uint(duk_hthread *thr, duk_uint32_t x) {\n\t/* The debugger protocol doesn't support a plain integer encoding for\n\t * the full 32-bit unsigned range (only 32-bit signed). For now,\n\t * unsigned 32-bit values simply written as signed ones. This is not\n\t * a concrete issue except for 32-bit heaphdr fields. Proper solutions\n\t * would be to (a) write such integers as IEEE doubles or (b) add an\n\t * unsigned 32-bit dvalue.\n\t */\n\tif (x >= 0x80000000UL) {\n\t\tDUK_D(DUK_DPRINT(\"writing unsigned integer 0x%08lx as signed integer\",\n\t\t (long) x));\n\t}\n\tduk_debug_write_int(thr, (duk_int32_t) x);\n}", "docstring": "/* Write unsigned 32-bit integer. */", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/duktape/duk_debugger.c#L758-L771", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "duk__bi_mul", "code": "DUK_LOCAL void duk__bi_mul(duk__bigint *x, duk__bigint *y, duk__bigint *z) {\n\tduk_small_int_t i, j, nx, nz;\n\n\tDUK_ASSERT(duk__bi_is_valid(y));\n\tDUK_ASSERT(duk__bi_is_valid(z));\n\n\tnx = y->n + z->n; /* max possible */\n\tDUK_ASSERT(nx <= DUK__BI_MAX_PARTS);\n\n\tif (nx == 0) {\n\t\t/* Both inputs are zero; cases where only one is zero can go\n\t\t * through main algorithm.\n\t\t */\n\t\tx->n = 0;\n\t\treturn;\n\t}\n\n\tduk_memzero((void *) x->v, (size_t) (sizeof(duk_uint32_t) * (size_t) nx));\n\tx->n = nx;\n\n\tnz = z->n;\n\tfor (i = 0; i < y->n; i++) {\n#if defined(DUK_USE_64BIT_OPS)\n\t\tduk_uint64_t tmp = 0U;\n\t\tfor (j = 0; j < nz; j++) {\n\t\t\ttmp += (duk_uint64_t) y->v[i] * (duk_uint64_t) z->v[j] + x->v[i+j];\n\t\t\tx->v[i+j] = (duk_uint32_t) (tmp & 0xffffffffUL);\n\t\t\ttmp = tmp >> 32;\n\t\t}\n\t\tif (tmp > 0) {\n\t\t\tDUK_ASSERT(i + j < nx);\n\t\t\tDUK_ASSERT(i + j < DUK__BI_MAX_PARTS);\n\t\t\tDUK_ASSERT(x->v[i+j] == 0U);\n\t\t\tx->v[i+j] = (duk_uint32_t) tmp;\n\t\t}\n#else\n\t\t/*\n\t\t * Multiply + add + carry for 32-bit components using only 16x16->32\n\t\t * multiplies and carry detection based on unsigned overflow.\n\t\t *\n\t\t * 1st mult, 32-bit: (A*2^16 + B)\n\t\t * 2nd mult, 32-bit: (C*2^16 + D)\n\t\t * 3rd add, 32-bit: E\n\t\t * 4th add, 32-bit: F\n\t\t *\n\t\t * (AC*2^16 + B) * (C*2^16 + D) + E + F\n\t\t * = AC*2^32 + AD*2^16 + BC*2^16 + BD + E + F\n\t\t * = AC*2^32 + (AD + BC)*2^16 + (BD + E + F)\n\t\t * = AC*2^32 + AD*2^16 + BC*2^16 + (BD + E + F)\n\t\t */\n\t\tduk_uint32_t a, b, c, d, e, f;\n\t\tduk_uint32_t r, s, t;\n\n\t\ta = y->v[i]; b = a & 0xffffUL; a = a >> 16;\n\n\t\tf = 0;\n\t\tfor (j = 0; j < nz; j++) {\n\t\t\tc = z->v[j]; d = c & 0xffffUL; c = c >> 16;\n\t\t\te = x->v[i+j];\n\n\t\t\t/* build result as: (r << 32) + s: start with (BD + E + F) */\n\t\t\tr = 0;\n\t\t\ts = b * d;\n\n\t\t\t/* add E */\n\t\t\tt = s + e;\n\t\t\tif (t < s) { r++; } /* carry */\n\t\t\ts = t;\n\n\t\t\t/* add F */\n\t\t\tt = s + f;\n\t\t\tif (t < s) { r++; } /* carry */\n\t\t\ts = t;\n\n\t\t\t/* add BC*2^16 */\n\t\t\tt = b * c;\n\t\t\tr += (t >> 16);\n\t\t\tt = s + ((t & 0xffffUL) << 16);\n\t\t\tif (t < s) { r++; } /* carry */\n\t\t\ts = 
t;\n\n\t\t\t/* add AD*2^16 */\n\t\t\tt = a * d;\n\t\t\tr += (t >> 16);\n\t\t\tt = s + ((t & 0xffffUL) << 16);\n\t\t\tif (t < s) { r++; } /* carry */\n\t\t\ts = t;\n\n\t\t\t/* add AC*2^32 */\n\t\t\tt = a * c;\n\t\t\tr += t;\n\n\t\t\tDUK_DDD(DUK_DDDPRINT(\"ab=%08lx cd=%08lx ef=%08lx -> rs=%08lx %08lx\",\n\t\t\t (unsigned long) y->v[i], (unsigned long) z->v[j],\n\t\t\t (unsigned long) x->v[i+j], (unsigned long) r,\n\t\t\t (unsigned long) s));\n\n\t\t\tx->v[i+j] = s;\n\t\t\tf = r;\n\t\t}\n\t\tif (f > 0U) {\n\t\t\tDUK_ASSERT(i + j < nx);\n\t\t\tDUK_ASSERT(i + j < DUK__BI_MAX_PARTS);\n\t\t\tDUK_ASSERT(x->v[i+j] == 0U);\n\t\t\tx->v[i+j] = (duk_uint32_t) f;\n\t\t}\n#endif /* DUK_USE_64BIT_OPS */\n\t}\n\n\tduk__bi_normalize(x);\n\tDUK_ASSERT(duk__bi_is_valid(x));\n}", "docstring": "/* x <- y * z */", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/duktape/duk_numconv.c#L396-L507", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "read_restart_marker", "code": "METHODDEF(boolean)\nread_restart_marker (j_decompress_ptr cinfo)\n{\n /* Obtain a marker unless we already did. */\n /* Note that next_marker will complain if it skips any data. */\n if (cinfo->unread_marker == 0) {\n if (! next_marker(cinfo))\n return FALSE;\n }\n\n if (cinfo->unread_marker ==\n ((int) M_RST0 + cinfo->marker->next_restart_num)) {\n /* Normal case --- swallow the marker and let entropy decoder continue */\n TRACEMS1(cinfo, 3, JTRC_RST, cinfo->marker->next_restart_num);\n cinfo->unread_marker = 0;\n } else {\n /* Uh-oh, the restart markers have been messed up. */\n /* Let the data source manager determine how to resync. */\n if (! (*cinfo->src->resync_to_restart) (cinfo,\n\t\t\t\t\t cinfo->marker->next_restart_num))\n return FALSE;\n }\n\n /* Update next-restart state */\n cinfo->marker->next_restart_num = (cinfo->marker->next_restart_num + 1) & 7;\n\n return TRUE;\n}", "docstring": "/*\n * Read a restart marker, which is expected to appear next in the datastream;\n * if the marker is not there, take appropriate recovery action.\n * Returns FALSE if suspension is required.\n *\n * This is called by the entropy decoder after it has read an appropriate\n * number of MCUs. cinfo->unread_marker may be nonzero if the entropy decoder\n * has already read a marker from the data source. 
Under normal conditions\n * cinfo->unread_marker will be reset to 0 before returning; if not reset,\n * it holds a marker which the decoder will be unable to read past.\n */", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/jpeg-8d/jdmarker.c#L1158-L1185", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "absx6502", "code": "void absx6502(m6502_t *m6502) {\n SAVEPC = RM16(PC);\n PC++;\n PC++;\n if (m6502_cycles[m6502->opcode] == 4)\n if ((SAVEPC >> 8) != ((SAVEPC + X) >> 8))\n m6502->clockticks6502++;\n SAVEPC += X;\n}", "docstring": "// ABS,X", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/libemulation/m6502.c#L224-L232", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "andcc", "code": "static void andcc(m6809_t *m6809)\n{\n uint8_t t;\n IMMBYTE(t);\n CC &= t;\n CHECK_IRQ_LINES;\n}", "docstring": "// case 0x1B: //ILLEGAL\n// case 0x1C: //ANDCC immediate #####", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/libemulation/m6809ops.c#L223-L229", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sys_country", "code": "int sys_country(char *country, int len) {\n int r = -1;\n\n#ifdef WINDOWS\n char buf[32];\n GEOID myGEO = GetUserGeoID(GEOCLASS_NATION);\n if (GetGeoInfoA(myGEO, GEO_ISO2, buf, sizeof(buf), 0)) {\n sys_strncpy(country, buf, len-1);\n r = 0;\n }\n#else\n char *s, *p, buf[32];\n if ((s = getenv(\"LANG\")) != NULL) {\n sys_strncpy(buf, s, sizeof(buf)-1);\n if ((p = sys_strchr(buf, '.')) != NULL) {\n // \"pt_BR.UTF-8\" -> \"pt_BR\"\n *p = 0;\n }\n if (!sys_strcmp(buf, \"C\")) {\n sys_strncpy(buf, EN_US, sizeof(buf)-1);\n }\n if ((p = sys_strchr(buf, '_')) != NULL) {\n // \"pt_BR\" -> \"BR\"\n sys_strncpy(country, p+1, len-1);\n r = 0;\n }\n }\n#endif\n\n return r;\n}", "docstring": "/*\nThe name of a locale consists of language codes, character encoding, and the description of a selected variant.\n\nA name starts with an ISO 639-1 lowercase two-letter language code, or an ISO 639-2 three-letter language code if the language has no two-letter code. For example, it is de for German, fr for French, and cel for Celtic. The code is followed for many but not all languages by an underscore _ and by an ISO 3166 uppercase two-letter country code. For example, this leads to de_CH for Swiss German, and fr_CA for a French-speaking system for a Canadian user likely to be located in Quebec.\n\nOptionally, a dot . follows the name of the character encoding such as UTF-8, or ISO-8859-1, and the @ sign followed by the name of a variant. 
For example, the name en_IE.UTF-8@euro describes the setup for an English system for Ireland with UTF-8 character encoding, and the Euro as the currency symbol.\n*/", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/libpit/sys.c#L344-L374", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SW_FT_Vector_Length", "code": "SW_FT_Fixed SW_FT_Vector_Length(SW_FT_Vector* vec)\n{\n SW_FT_Int shift;\n SW_FT_Vector v;\n\n v = *vec;\n\n /* handle trivial cases */\n if (v.x == 0) {\n return SW_FT_ABS(v.y);\n } else if (v.y == 0) {\n return SW_FT_ABS(v.x);\n }\n\n /* general case */\n shift = ft_trig_prenorm(&v);\n ft_trig_pseudo_polarize(&v);\n\n v.x = ft_trig_downscale(v.x);\n\n if (shift > 0) return (v.x + (1 << (shift - 1))) >> shift;\n\n return (SW_FT_Fixed)((SW_FT_UInt32)v.x << -shift);\n}", "docstring": "/* documentation is in fttrigon.h */", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/libpluto/sw_ft_math.c#L388-L411", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "loadline", "code": "static int loadline (lua_State *L) {\n int status;\n lua_settop(L, 0);\n if (!pushline(L, 1))\n return -1; /* no input */\n if ((status = addreturn(L)) != LUA_OK) /* 'return ...' did not work? */\n status = multiline(L); /* try as command, maybe with continuation lines */\n lua_remove(L, 1); /* remove line from the stack */\n lua_assert(lua_gettop(L) == 1);\n return status;\n}", "docstring": "/*\n** Read a line and try to load (compile) it first as an expression (by\n** adding \"return \" in front of it) and second as a statement. 
Return\n** the final status of load/call with the resulting function (if any)\n** in the top of the stack.\n*/", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/lua/lua.c#L372-L382", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SetOutFileDir", "code": "VOID\nSetOutFileDir(const char *sz)\n{\n if (sz && strcmp(sz, \".\") == 0)\n szOutFileDir = \"\";\n else\n szOutFileDir = sz;\n}", "docstring": "/*-----------------------------------------------------------------------------\n|\tSetOutFileDir\n|\t\n|\t\tSet output file path -- no trailing / or \\ \n-------------------------------------------------------------WESC------------*/", "url": "https://github.com/migueletto/PumpkinOS/blob/d31ba3aca31b73c8f1ce40dbf812ddffe44c22de/src/pilrc/util.c#L475-L482", "sha": "d31ba3aca31b73c8f1ce40dbf812ddffe44c22de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "begin_SDA_Read", "code": "void TFT_eSPI::begin_SDA_Read(void)\n{\n #ifdef TFT_SPI_OVERLAP\n // Reads in overlap mode not supported\n #else\n spi.end();\n #endif\n}", "docstring": "/***************************************************************************************\n** Function name: beginSDA\n** Description: Detach SPI from pin to permit software SPI\n***************************************************************************************/", "url": "https://github.com/Xinyuan-LilyGO/T-Display-S3-AMOLED/blob/edd133335c9f7c38d1e9be2d0eb67371f1f6428e/lib/TFT_eSPI/Processors/TFT_eSPI_ESP8266.c#L40-L47", "sha": "edd133335c9f7c38d1e9be2d0eb67371f1f6428e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "lv_example_flex_5", "code": "void lv_example_flex_5(void)\n{\n lv_obj_t * cont = lv_obj_create(lv_scr_act());\n lv_obj_set_size(cont, 300, 220);\n lv_obj_center(cont);\n lv_obj_set_flex_flow(cont, LV_FLEX_FLOW_ROW_WRAP);\n\n uint32_t i;\n for(i = 0; i < 9; i++) {\n lv_obj_t * obj = lv_obj_create(cont);\n lv_obj_set_size(obj, 70, LV_SIZE_CONTENT);\n\n lv_obj_t * label = lv_label_create(obj);\n lv_label_set_text_fmt(label, \"%\"LV_PRIu32, i);\n lv_obj_center(label);\n }\n\n lv_anim_t a;\n lv_anim_init(&a);\n lv_anim_set_var(&a, cont);\n lv_anim_set_values(&a, 0, 10);\n lv_anim_set_repeat_count(&a, LV_ANIM_REPEAT_INFINITE);\n\n lv_anim_set_exec_cb(&a, row_gap_anim);\n lv_anim_set_time(&a, 500);\n lv_anim_set_playback_time(&a, 500);\n lv_anim_start(&a);\n\n lv_anim_set_exec_cb(&a, column_gap_anim);\n lv_anim_set_time(&a, 3000);\n lv_anim_set_playback_time(&a, 3000);\n lv_anim_start(&a);\n}", "docstring": "/**\n * Demonstrate the effect of column and row gap style properties\n */", "url": "https://github.com/Xinyuan-LilyGO/T-Display-S3-AMOLED/blob/edd133335c9f7c38d1e9be2d0eb67371f1f6428e/lib/lvgl/examples/layouts/flex/lv_example_flex_5.c#L17-L49", "sha": "edd133335c9f7c38d1e9be2d0eb67371f1f6428e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "lv_example_style_9", "code": "void lv_example_style_9(void)\n{\n static lv_style_t style;\n lv_style_init(&style);\n\n lv_style_set_line_color(&style, lv_palette_main(LV_PALETTE_GREY));\n lv_style_set_line_width(&style, 6);\n lv_style_set_line_rounded(&style, true);\n\n /*Create an object with the new style*/\n lv_obj_t * obj = lv_line_create(lv_scr_act());\n lv_obj_add_style(obj, &style, 0);\n\n static lv_point_t p[] = {{10, 30}, {30, 50}, {100, 0}};\n 
lv_line_set_points(obj, p, 3);\n\n lv_obj_center(obj);\n}", "docstring": "/**\n * Using the line style properties\n */", "url": "https://github.com/Xinyuan-LilyGO/T-Display-S3-AMOLED/blob/edd133335c9f7c38d1e9be2d0eb67371f1f6428e/lib/lvgl/examples/styles/lv_example_style_9.c#L7-L24", "sha": "edd133335c9f7c38d1e9be2d0eb67371f1f6428e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "lv_fs_fatfs_init", "code": "void lv_fs_fatfs_init(void)\n{\n /*----------------------------------------------------\n * Initialize your storage device and File System\n * -------------------------------------------------*/\n fs_init();\n\n /*---------------------------------------------------\n * Register the file system interface in LVGL\n *--------------------------------------------------*/\n\n /*Add a simple drive to open images*/\n static lv_fs_drv_t fs_drv; /*A driver descriptor*/\n lv_fs_drv_init(&fs_drv);\n\n /*Set up fields...*/\n fs_drv.letter = LV_FS_FATFS_LETTER;\n fs_drv.cache_size = LV_FS_FATFS_CACHE_SIZE;\n\n fs_drv.open_cb = fs_open;\n fs_drv.close_cb = fs_close;\n fs_drv.read_cb = fs_read;\n fs_drv.write_cb = fs_write;\n fs_drv.seek_cb = fs_seek;\n fs_drv.tell_cb = fs_tell;\n\n fs_drv.dir_close_cb = fs_dir_close;\n fs_drv.dir_open_cb = fs_dir_open;\n fs_drv.dir_read_cb = fs_dir_read;\n\n lv_fs_drv_register(&fs_drv);\n}", "docstring": "/**********************\n * STATIC VARIABLES\n **********************/\n/**********************\n * MACROS\n **********************/\n/**********************\n * GLOBAL FUNCTIONS\n **********************/", "url": "https://github.com/Xinyuan-LilyGO/T-Display-S3-AMOLED/blob/edd133335c9f7c38d1e9be2d0eb67371f1f6428e/lib/lvgl/src/extra/libs/fsdrv/lv_fs_fatfs.c#L53-L84", "sha": "edd133335c9f7c38d1e9be2d0eb67371f1f6428e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fillRectangle", "code": "static void fillRectangle(int left, int top, int width, int height, uint8_t qrcode[]) {\n\tfor (int dy = 0; dy < height; dy++) {\n\t\tfor (int dx = 0; dx < width; dx++)\n\t\t\tsetModule(qrcode, left + dx, top + dy, true);\n\t}\n}", "docstring": "// Sets every pixel in the range [left : left + width] * [top : top + height] to black.", "url": "https://github.com/Xinyuan-LilyGO/T-Display-S3-AMOLED/blob/edd133335c9f7c38d1e9be2d0eb67371f1f6428e/lib/lvgl/src/extra/libs/qrcode/qrcodegen.c#L559-L564", "sha": "edd133335c9f7c38d1e9be2d0eb67371f1f6428e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "lv_switch_trigger_anim", "code": "static void lv_switch_trigger_anim(lv_obj_t * obj)\n{\n LV_ASSERT_OBJ(obj, MY_CLASS);\n lv_switch_t * sw = (lv_switch_t *)obj;\n\n uint32_t anim_dur_full = lv_obj_get_style_anim_time(obj, LV_PART_MAIN);\n\n if(anim_dur_full > 0) {\n bool chk = lv_obj_get_state(obj) & LV_STATE_CHECKED;\n int32_t anim_start;\n int32_t anim_end;\n /*No animation in progress -> simply set the values*/\n if(sw->anim_state == LV_SWITCH_ANIM_STATE_INV) {\n anim_start = chk ? LV_SWITCH_ANIM_STATE_START : LV_SWITCH_ANIM_STATE_END;\n anim_end = chk ? LV_SWITCH_ANIM_STATE_END : LV_SWITCH_ANIM_STATE_START;\n }\n /*Animation in progress. Start from the animation end value*/\n else {\n anim_start = sw->anim_state;\n anim_end = chk ? 
LV_SWITCH_ANIM_STATE_END : LV_SWITCH_ANIM_STATE_START;\n }\n /*Calculate actual animation duration*/\n uint32_t anim_dur = (anim_dur_full * LV_ABS(anim_start - anim_end)) / LV_SWITCH_ANIM_STATE_END;\n\n /*Stop the previous animation if it exists*/\n lv_anim_del(sw, NULL);\n\n lv_anim_t a;\n lv_anim_init(&a);\n lv_anim_set_var(&a, sw);\n lv_anim_set_exec_cb(&a, lv_switch_anim_exec_cb);\n lv_anim_set_values(&a, anim_start, anim_end);\n lv_anim_set_ready_cb(&a, lv_switch_anim_ready);\n lv_anim_set_time(&a, anim_dur);\n lv_anim_start(&a);\n }\n}", "docstring": "/**\n * Starts an animation for the switch knob. if the anim_time style property is greater than 0\n * @param obj the switch to animate\n */", "url": "https://github.com/Xinyuan-LilyGO/T-Display-S3-AMOLED/blob/edd133335c9f7c38d1e9be2d0eb67371f1f6428e/lib/lvgl/src/widgets/lv_switch.c#L238-L274", "sha": "edd133335c9f7c38d1e9be2d0eb67371f1f6428e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "adc_init", "code": "void adc_init( void )\n{\n\t// ADCCLK = 24 MHz => RCC_ADCPRE = 0: divide by 2\n\tRCC->CFGR0 &= ~(0x1F<<11);\n\t\n\t// Enable GPIOD and ADC\n\tRCC->APB2PCENR |= RCC_APB2Periph_GPIOD | RCC_APB2Periph_ADC1;\n\t\n\t// PD4 is analog input chl 7\n\tGPIOD->CFGLR &= ~(0xf<<(4*4));\t// CNF = 00: Analog, MODE = 00: Input\n\t\n\t// Reset the ADC to init all regs\n\tRCC->APB2PRSTR |= RCC_APB2Periph_ADC1;\n\tRCC->APB2PRSTR &= ~RCC_APB2Periph_ADC1;\n\t\n\t// Set up single conversion on chl 7\n\tADC1->RSQR1 = 0;\n\tADC1->RSQR2 = 0;\n\tADC1->RSQR3 = 7;\t// 0-9 for 8 ext inputs and two internals\n\t\n\t// set sampling time for chl 7\n\tADC1->SAMPTR2 &= ~(ADC_SMP0<<(3*7));\n\tADC1->SAMPTR2 |= 7<<(3*7);\t// 0:7 => 3/9/15/30/43/57/73/241 cycles\n\t\t\n\t// turn on ADC and set rule group to sw trig\n\tADC1->CTLR2 |= ADC_ADON | ADC_EXTSEL;\n\t\n\t// Reset calibration\n\tADC1->CTLR2 |= ADC_RSTCAL;\n\twhile(ADC1->CTLR2 & ADC_RSTCAL);\n\t\n\t// Calibrate\n\tADC1->CTLR2 |= ADC_CAL;\n\twhile(ADC1->CTLR2 & ADC_CAL);\n\t\n\t// should be ready for SW conversion now\n}", "docstring": "/*\n * initialize adc for polling\n */", "url": "https://github.com/cnlohr/ch32v003fun/blob/92b39c8984cab685ab59d6cc6c108de21e21d98a/examples/adc_polled/adc_polled.c#L12-L48", "sha": "92b39c8984cab685ab59d6cc6c108de21e21d98a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EXTI_Line_Init", "code": "void EXTI_Line_Init(void)\n{\n GPIO_InitTypeDef GPIO_InitStructure = {0};\n EXTI_InitTypeDef EXTI_InitStructure = {0};\n NVIC_InitTypeDef NVIC_InitStructure = {0};\n\n RCC_APB2PeriphClockCmd(RCC_APB2Periph_AFIO | RCC_APB2Periph_GPIOC, ENABLE);\n\n GPIO_InitStructure.GPIO_Pin = GPIO_Pin_7;\n GPIO_InitStructure.GPIO_Mode = GPIO_Mode_IN_FLOATING;\n GPIO_Init(GPIOC, &GPIO_InitStructure);\n\n /* GPIOC 7 ----> EXTI_Line7 */\n GPIO_EXTILineConfig(GPIO_PortSourceGPIOC, GPIO_PinSource7);\n EXTI_InitStructure.EXTI_Line = EXTI_Line7;\n EXTI_InitStructure.EXTI_Mode = EXTI_Mode_Interrupt;\n EXTI_InitStructure.EXTI_Trigger = EXTI_Trigger_Falling;\n EXTI_InitStructure.EXTI_LineCmd = ENABLE;\n EXTI_Init(&EXTI_InitStructure);\n\n NVIC_InitStructure.NVIC_IRQChannel = EXTI9_5_IRQn;\n NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 1;\n NVIC_InitStructure.NVIC_IRQChannelSubPriority = 2;\n NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE;\n NVIC_Init(&NVIC_InitStructure);\n}", "docstring": "/*********************************************************************\n * @fn EXTI_Line_Init\n *\n * @brief Configure EXTI Line7.\n *\n * 
@param none.\n *\n * @return none.\n */", "url": "https://github.com/Community-PIO-CH32V/platform-ch32v/blob/8af792530c1b16b24526f6e64e2ea1b383890047/examples/webserver-ch32v307-none-os/lib/NetLib/eth_driver_RMII.c#L172-L197", "sha": "8af792530c1b16b24526f6e64e2ea1b383890047"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FetchAesKetAndIv", "code": "VOID FetchAesKetAndIv(IN OUT PBYTE ctAesKey, IN OUT PBYTE ctAesIv) {\n\n\tfor (int i = 0; i < IV_SIZE; i++) {\n\t\tctAesIv[i] -= 0x03;\n\t}\n\tfor (int i = 0; i < KEY_SIZE; i++) {\n\t\tctAesKey[i] -= 0x03;\n\t}\n\tfor (int i = 0; i < IV_SIZE; i++) {\n\t\tctAesIv[i] ^= (BYTE)ctAesKey[0];\n\t}\n\tfor (int i = 1; i < KEY_SIZE; i++) {\n\t\tfor (int j = 0; j < IV_SIZE; j++) {\n\t\t\tctAesKey[i] ^= (BYTE)ctAesIv[j];\n\t\t}\n\t}\n}", "docstring": "/*\n\tfunction to decrypt the aes key and iv\n*/", "url": "https://github.com/NUL0x4C/AtomLdr/blob/f18cb75c28f312fb8edaa86ea7c65f148ff26d0d/AtomLdr/dllmain.c#L83-L99", "sha": "f18cb75c28f312fb8edaa86ea7c65f148ff26d0d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "recoverTransferSettings", "code": "static void recoverTransferSettings(sqlite3_recover *p){\n const char *aPragma[] = {\n \"encoding\",\n \"page_size\",\n \"auto_vacuum\",\n \"user_version\",\n \"application_id\"\n };\n int ii;\n\n /* Truncate the output database to 0 pages in size. This is done by \n ** opening a new, empty, temp db, then using the backup API to clobber \n ** any existing output db with a copy of it. */\n if( p->errCode==SQLITE_OK ){\n sqlite3 *db2 = 0;\n int rc = sqlite3_open(\"\", &db2);\n if( rc!=SQLITE_OK ){\n recoverDbError(p, db2);\n return;\n }\n\n for(ii=0; ii<(int)(sizeof(aPragma)/sizeof(aPragma[0])); ii++){\n const char *zPrag = aPragma[ii];\n sqlite3_stmt *p1 = 0;\n p1 = recoverPreparePrintf(p, p->dbIn, \"PRAGMA %Q.%s\", p->zDb, zPrag);\n if( p->errCode==SQLITE_OK && sqlite3_step(p1)==SQLITE_ROW ){\n const char *zArg = (const char*)sqlite3_column_text(p1, 0);\n char *z2 = recoverMPrintf(p, \"PRAGMA %s = %Q\", zPrag, zArg);\n recoverSqlCallback(p, z2);\n recoverExec(p, db2, z2);\n sqlite3_free(z2);\n if( zArg==0 ){\n recoverError(p, SQLITE_NOMEM, 0);\n }\n }\n recoverFinalize(p, p1);\n }\n recoverExec(p, db2, \"CREATE TABLE t1(a); DROP TABLE t1;\");\n\n if( p->errCode==SQLITE_OK ){\n sqlite3 *db = p->dbOut;\n sqlite3_backup *pBackup = sqlite3_backup_init(db, \"main\", db2, \"main\");\n if( pBackup ){\n sqlite3_backup_step(pBackup, -1);\n p->errCode = sqlite3_backup_finish(pBackup);\n }else{\n recoverDbError(p, db);\n }\n }\n\n sqlite3_close(db2);\n }\n}", "docstring": "/*\n** Transfer the following settings from the input database to the output\n** database:\n**\n** + page-size,\n** + auto-vacuum settings,\n** + database encoding,\n** + user-version (PRAGMA user_version), and\n** + application-id (PRAGMA application_id), and\n*/", "url": "https://github.com/duckdb/duckdb-spatial/blob/aa95ed8049da630da45e763f3bb6e9aeb1891e34/deps/vendor/sqlite3/src/shell.c#L13507-L13559", "sha": "aa95ed8049da630da45e763f3bb6e9aeb1891e34"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pagerOpenWalIfPresent", "code": "static int pagerOpenWalIfPresent(Pager *pPager){\n int rc = SQLITE_OK;\n assert( pPager->eState==PAGER_OPEN );\n assert( pPager->eLock>=SHARED_LOCK );\n\n if( !pPager->tempFile ){\n int isWal; /* True if WAL file exists */\n rc = sqlite3OsAccess(\n pPager->pVfs, pPager->zWal, SQLITE_ACCESS_EXISTS, &isWal\n );\n if( rc==SQLITE_OK ){\n if( isWal ){\n Pgno nPage; /* Size of the database file */\n\n rc = pagerPagecount(pPager, 
&nPage);\n if( rc ) return rc;\n if( nPage==0 ){\n rc = sqlite3OsDelete(pPager->pVfs, pPager->zWal, 0);\n }else{\n testcase( sqlite3PcachePagecount(pPager->pPCache)==0 );\n rc = sqlite3PagerOpenWal(pPager, 0);\n }\n }else if( pPager->journalMode==PAGER_JOURNALMODE_WAL ){\n pPager->journalMode = PAGER_JOURNALMODE_DELETE;\n }\n }\n }\n return rc;\n}", "docstring": "/*\n** Check if the *-wal file that corresponds to the database opened by pPager\n** exists if the database is not empy, or verify that the *-wal file does\n** not exist (by deleting it) if the database file is empty.\n**\n** If the database is not empty and the *-wal file exists, open the pager\n** in WAL mode. If the database is empty or if no *-wal file exists and\n** if no error occurs, make sure Pager.journalMode is not set to\n** PAGER_JOURNALMODE_WAL.\n**\n** Return SQLITE_OK or an error code.\n**\n** The caller must hold a SHARED lock on the database file to call this\n** function. Because an EXCLUSIVE lock on the db file is required to delete\n** a WAL on a none-empty database, this ensures there is no race condition\n** between the xAccess() below and an xDelete() being executed by some\n** other connection.\n*/", "url": "https://github.com/duckdb/duckdb-spatial/blob/aa95ed8049da630da45e763f3bb6e9aeb1891e34/deps/vendor/sqlite3/src/sqlite3.c#L58481-L58509", "sha": "aa95ed8049da630da45e763f3bb6e9aeb1891e34"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3WalLimit", "code": "SQLITE_PRIVATE void sqlite3WalLimit(Wal *pWal, i64 iLimit){\n if( pWal ) pWal->mxWalSize = iLimit;\n}", "docstring": "/*\n** Change the size to which the WAL file is trucated on each reset.\n*/", "url": "https://github.com/duckdb/duckdb-spatial/blob/aa95ed8049da630da45e763f3bb6e9aeb1891e34/deps/vendor/sqlite3/src/sqlite3.c#L64512-L64514", "sha": "aa95ed8049da630da45e763f3bb6e9aeb1891e34"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3WalBeginReadTransaction", "code": "SQLITE_PRIVATE int sqlite3WalBeginReadTransaction(Wal *pWal, int *pChanged){\n int rc; /* Return code */\n int cnt = 0; /* Number of TryBeginRead attempts */\n#ifdef SQLITE_ENABLE_SNAPSHOT\n int bChanged = 0;\n WalIndexHdr *pSnapshot = pWal->pSnapshot;\n#endif\n\n assert( pWal->ckptLock==0 );\n\n#ifdef SQLITE_ENABLE_SNAPSHOT\n if( pSnapshot ){\n if( memcmp(pSnapshot, &pWal->hdr, sizeof(WalIndexHdr))!=0 ){\n bChanged = 1;\n }\n\n /* It is possible that there is a checkpointer thread running\n ** concurrent with this code. If this is the case, it may be that the\n ** checkpointer has already determined that it will checkpoint\n ** snapshot X, where X is later in the wal file than pSnapshot, but\n ** has not yet set the pInfo->nBackfillAttempted variable to indicate\n ** its intent. To avoid the race condition this leads to, ensure that\n ** there is no checkpointer process by taking a shared CKPT lock\n ** before checking pInfo->nBackfillAttempted. 
*/\n (void)walEnableBlocking(pWal);\n rc = walLockShared(pWal, WAL_CKPT_LOCK);\n walDisableBlocking(pWal);\n\n if( rc!=SQLITE_OK ){\n return rc;\n }\n pWal->ckptLock = 1;\n }\n#endif\n\n do{\n rc = walTryBeginRead(pWal, pChanged, 0, ++cnt);\n }while( rc==WAL_RETRY );\n testcase( (rc&0xff)==SQLITE_BUSY );\n testcase( (rc&0xff)==SQLITE_IOERR );\n testcase( rc==SQLITE_PROTOCOL );\n testcase( rc==SQLITE_OK );\n\n#ifdef SQLITE_ENABLE_SNAPSHOT\n if( rc==SQLITE_OK ){\n if( pSnapshot && memcmp(pSnapshot, &pWal->hdr, sizeof(WalIndexHdr))!=0 ){\n /* At this point the client has a lock on an aReadMark[] slot holding\n ** a value equal to or smaller than pSnapshot->mxFrame, but pWal->hdr\n ** is populated with the wal-index header corresponding to the head\n ** of the wal file. Verify that pSnapshot is still valid before\n ** continuing. Reasons why pSnapshot might no longer be valid:\n **\n ** (1) The WAL file has been reset since the snapshot was taken.\n ** In this case, the salt will have changed.\n **\n ** (2) A checkpoint as been attempted that wrote frames past\n ** pSnapshot->mxFrame into the database file. Note that the\n ** checkpoint need not have completed for this to cause problems.\n */\n volatile WalCkptInfo *pInfo = walCkptInfo(pWal);\n\n assert( pWal->readLock>0 || pWal->hdr.mxFrame==0 );\n assert( pInfo->aReadMark[pWal->readLock]<=pSnapshot->mxFrame );\n\n /* Check that the wal file has not been wrapped. Assuming that it has\n ** not, also check that no checkpointer has attempted to checkpoint any\n ** frames beyond pSnapshot->mxFrame. If either of these conditions are\n ** true, return SQLITE_ERROR_SNAPSHOT. Otherwise, overwrite pWal->hdr\n ** with *pSnapshot and set *pChanged as appropriate for opening the\n ** snapshot. */\n if( !memcmp(pSnapshot->aSalt, pWal->hdr.aSalt, sizeof(pWal->hdr.aSalt))\n && pSnapshot->mxFrame>=pInfo->nBackfillAttempted\n ){\n assert( pWal->readLock>0 );\n memcpy(&pWal->hdr, pSnapshot, sizeof(WalIndexHdr));\n *pChanged = bChanged;\n }else{\n rc = SQLITE_ERROR_SNAPSHOT;\n }\n\n /* A client using a non-current snapshot may not ignore any frames\n ** from the start of the wal file. This is because, for a system\n ** where (minFrame < iSnapshot < maxFrame), a checkpointer may\n ** have omitted to checkpoint a frame earlier than minFrame in\n ** the file because there exists a frame after iSnapshot that\n ** is the same database page. */\n pWal->minFrame = 1;\n\n if( rc!=SQLITE_OK ){\n sqlite3WalEndReadTransaction(pWal);\n }\n }\n }\n\n /* Release the shared CKPT lock obtained above. */\n if( pWal->ckptLock ){\n assert( pSnapshot );\n walUnlockShared(pWal, WAL_CKPT_LOCK);\n pWal->ckptLock = 0;\n }\n#endif\n return rc;\n}", "docstring": "/* SQLITE_ENABLE_SNAPSHOT */\n/*\n** Begin a read transaction on the database.\n**\n** This routine used to be called sqlite3OpenSnapshot() and with good reason:\n** it takes a snapshot of the state of the WAL and wal-index for the current\n** instant in time. The current thread will continue to use this snapshot.\n** Other threads might append new content to the WAL and wal-index but\n** that extra content is ignored by the current thread.\n**\n** If the database contents have changes since the previous read\n** transaction, then *pChanged is set to 1 before returning. 
The\n** Pager layer will use this to know that its cache is stale and\n** needs to be flushed.\n*/", "url": "https://github.com/duckdb/duckdb-spatial/blob/aa95ed8049da630da45e763f3bb6e9aeb1891e34/deps/vendor/sqlite3/src/sqlite3.c#L65947-L66049", "sha": "aa95ed8049da630da45e763f3bb6e9aeb1891e34"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "vdbeAssertFieldCountWithinLimits", "code": "static void vdbeAssertFieldCountWithinLimits(\n int nKey, const void *pKey, /* The record to verify */\n const KeyInfo *pKeyInfo /* Compare size with this KeyInfo */\n){\n int nField = 0;\n u32 szHdr;\n u32 idx;\n u32 notUsed;\n const unsigned char *aKey = (const unsigned char*)pKey;\n\n if( CORRUPT_DB ) return;\n idx = getVarint32(aKey, szHdr);\n assert( nKey>=0 );\n assert( szHdr<=(u32)nKey );\n while( idx<szHdr ){\n idx += getVarint32(aKey+idx, notUsed);\n nField++;\n }\n assert( nField <= pKeyInfo->nAllField );\n}", "docstring": "/*\n** Count the number of fields (a.k.a. columns) in the record given by\n** pKey,nKey. The verify that this count is less than or equal to the\n** limit given by pKeyInfo->nAllField.\n**\n** If this constraint is not satisfied, it means that the high-speed\n** vdbeRecordCompareInt() and vdbeRecordCompareString() routines will\n** not work correctly. If this assert() ever fires, it probably means\n** that the KeyInfo.nKeyField or KeyInfo.nAllField values were computed\n** incorrectly.\n*/", "url": "https://github.com/duckdb/duckdb-spatial/blob/aa95ed8049da630da45e763f3bb6e9aeb1891e34/deps/vendor/sqlite3/src/sqlite3.c#L86235-L86254", "sha": "aa95ed8049da630da45e763f3bb6e9aeb1891e34"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "countOfViewOptimization", "code": "static int countOfViewOptimization(Parse *pParse, Select *p){\n Select *pSub, *pPrior;\n Expr *pExpr;\n Expr *pCount;\n sqlite3 *db;\n if( (p->selFlags & SF_Aggregate)==0 ) return 0; /* This is an aggregate */\n if( p->pEList->nExpr!=1 ) return 0; /* Single result column */\n if( p->pWhere ) return 0;\n if( p->pGroupBy ) return 0;\n pExpr = p->pEList->a[0].pExpr;\n if( pExpr->op!=TK_AGG_FUNCTION ) return 0; /* Result is an aggregate */\n assert( ExprUseUToken(pExpr) );\n if( sqlite3_stricmp(pExpr->u.zToken,\"count\") ) return 0; /* Is count() */\n assert( ExprUseXList(pExpr) );\n if( pExpr->x.pList!=0 ) return 0; /* Must be count(*) */\n if( p->pSrc->nSrc!=1 ) return 0; /* One table in FROM */\n pSub = p->pSrc->a[0].pSelect;\n if( pSub==0 ) return 0; /* The FROM is a subquery */\n if( pSub->pPrior==0 ) return 0; /* Must be a compound ry */\n do{\n if( pSub->op!=TK_ALL && pSub->pPrior ) return 0; /* Must be UNION ALL */\n if( pSub->pWhere ) return 0; /* No WHERE clause */\n if( pSub->pLimit ) return 0; /* No LIMIT clause */\n if( pSub->selFlags & SF_Aggregate ) return 0; /* Not an aggregate */\n pSub = pSub->pPrior; /* Repeat over compound */\n }while( pSub );\n\n /* If we reach this point then it is OK to perform the transformation */\n\n db = pParse->db;\n pCount = pExpr;\n pExpr = 0;\n pSub = p->pSrc->a[0].pSelect;\n p->pSrc->a[0].pSelect = 0;\n sqlite3SrcListDelete(db, p->pSrc);\n p->pSrc = sqlite3DbMallocZero(pParse->db, sizeof(*p->pSrc));\n while( pSub ){\n Expr *pTerm;\n pPrior = pSub->pPrior;\n pSub->pPrior = 0;\n pSub->pNext = 0;\n pSub->selFlags |= SF_Aggregate;\n pSub->selFlags &= ~SF_Compound;\n pSub->nSelectRow = 0;\n sqlite3ExprListDelete(db, pSub->pEList);\n pTerm = pPrior ? 
sqlite3ExprDup(db, pCount, 0) : pCount;\n pSub->pEList = sqlite3ExprListAppend(pParse, 0, pTerm);\n pTerm = sqlite3PExpr(pParse, TK_SELECT, 0, 0);\n sqlite3PExprAddSelect(pParse, pTerm, pSub);\n if( pExpr==0 ){\n pExpr = pTerm;\n }else{\n pExpr = sqlite3PExpr(pParse, TK_PLUS, pTerm, pExpr);\n }\n pSub = pPrior;\n }\n p->pEList->a[0].pExpr = pExpr;\n p->selFlags &= ~SF_Aggregate;\n\n#if TREETRACE_ENABLED\n if( sqlite3TreeTrace & 0x400 ){\n SELECTTRACE(0x400,pParse,p,(\"After count-of-view optimization:\\n\"));\n sqlite3TreeViewSelect(0, p, 0);\n }\n#endif\n return 1;\n}", "docstring": "/*\n** Attempt to transform a query of the form\n**\n** SELECT count(*) FROM (SELECT x FROM t1 UNION ALL SELECT y FROM t2)\n**\n** Into this:\n**\n** SELECT (SELECT count(*) FROM t1)+(SELECT count(*) FROM t2)\n**\n** The transformation only works if all of the following are true:\n**\n** * The subquery is a UNION ALL of two or more terms\n** * The subquery does not have a LIMIT clause\n** * There is no WHERE or GROUP BY or HAVING clauses on the subqueries\n** * The outer query is a simple count(*) with no WHERE clause or other\n** extraneous syntax.\n**\n** Return TRUE if the optimization is undertaken.\n*/", "url": "https://github.com/duckdb/duckdb-spatial/blob/aa95ed8049da630da45e763f3bb6e9aeb1891e34/deps/vendor/sqlite3/src/sqlite3.c#L143883-L143949", "sha": "aa95ed8049da630da45e763f3bb6e9aeb1891e34"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "khrIcdOsLibraryUnload", "code": "void khrIcdOsLibraryUnload(void *library)\n{\n dlclose(library);\n}", "docstring": "// unload a library", "url": "https://github.com/ROCm/clr/blob/a8edb8d467ebb5678d2f4506bd01efd3aaeddcab/opencl/khronos/icd/loader/linux/icd_linux.c#L191-L194", "sha": "a8edb8d467ebb5678d2f4506bd01efd3aaeddcab"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "handle_result", "code": "int\nhandle_result(\n PyObject *raw_result,\n char **res_target,\n int *status_target,\n PyObject **headers_target,\n PyObject *raw_path,\n const char *method\n)\n{\n /*\n * This calls handle_result_impl() internally, but\n * this function is the actual interface for handling a return value.\n *\n * The only extra thing that this does is write to the route log.\n */\n int res = handle_result_impl(\n raw_result,\n res_target,\n status_target,\n headers_target\n );\n\n return res;\n // Calling route_log is extremely slow\n if (res < 0)\n return -1;\n\n if (!route_log) return res;\n\n PyObject *args = Py_BuildValue(\n \"(iOs)\",\n *status_target,\n raw_path,\n method\n );\n\n if (!args)\n return -1;\n\n /*\n * A lot of errors related to memory corruption are traced\n * to here by debuggers.\n *\n * This is, more or less, a false positive! It's quite\n * unlikely that the actual cause of the issue is here.\n */\n PyObject *result = PyObject_Call(\n route_log,\n args,\n NULL\n );\n\n if (!result)\n {\n Py_DECREF(args);\n return -1;\n }\n\n Py_DECREF(result);\n Py_DECREF(args);\n\n return res;\n}", "docstring": "/*\n * Generate HTTP response components (i.e. the body, status, and headers) from\n * a route return value.\n *\n * The result passed should be a tuple, or body string. This function\n * does not call __view_result__(), as that is up to the caller.\n *\n * The body output parameter will be a string on the heap,\n * and is responsible for deallocating it with PyMem_Free()\n *\n * The status output parameter can be *any* integer (including non-HTTP\n * status codes). 
Validation is up to the caller.\n *\n * The headers will always be an ASGI headers iterable [(bytes_key, bytes_value), ...]\n *\n * If this function fails, the caller is not responsible for\n * deallocating or managing references of any of the parameters.\n */", "url": "https://github.com/ZeroIntensity/view.py/blob/7fe291b0feb5f5ae2da3fbf552010a4ee4e470ef/src/_view/results.c#L304-L367", "sha": "7fe291b0feb5f5ae2da3fbf552010a4ee4e470ef"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AM_rotate", "code": "void\nAM_rotate\n( fixed_t*\tx,\n fixed_t*\ty,\n angle_t\ta )\n{\n fixed_t tmpx;\n\n tmpx =\n\tFixedMul(*x,finecosine[a>>ANGLETOFINESHIFT])\n\t- FixedMul(*y,finesine[a>>ANGLETOFINESHIFT]);\n \n *y =\n\tFixedMul(*x,finesine[a>>ANGLETOFINESHIFT])\n\t+ FixedMul(*y,finecosine[a>>ANGLETOFINESHIFT]);\n\n *x = tmpx;\n}", "docstring": "//\n// Rotation in 2D.\n// Used to rotate player arrow line character.\n//", "url": "https://github.com/lukneu/doom-teletext/blob/b9f5fd1cd89a15150c4efcc02f63f6e0e7ce21f4/doom-teletext/am_map.c#L1178-L1195", "sha": "b9f5fd1cd89a15150c4efcc02f63f6e0e7ce21f4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Default_Handler", "code": "void Default_Handler(void) {\n while(1);\n}", "docstring": "/*----------------------------------------------------------------------------\n Default Handler for Exceptions / Interrupts\n *----------------------------------------------------------------------------*/", "url": "https://github.com/ARM-software/CMSIS_6/blob/1a1799c6c58c47e737bb19523483344a25d29895/CMSIS/CoreValidation/Layer/Target/CA9/RTE/Device/ARMCA9/startup_ARMCA9.c#L146-L148", "sha": "1a1799c6c58c47e737bb19523483344a25d29895"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ARM_USBH_PipeTransferGetResult", "code": "uint32_t ARM_USBH_PipeTransferGetResult (ARM_USBH_PIPE_HANDLE pipe_hndl) {\n return 0;\n}", "docstring": "/**\n\\fn int32_t ARM_USBH_PipeTransfer (ARM_USBH_PIPE_HANDLE pipe_hndl, uint32_t packet, uint8_t *data, uint32_t num)\n\\details\nThe function \\b ARM_USBH_PipeTransfer generates packets for sending or receiving data from an USB Endpoint.\n\nThe function specifies the buffer with data to send or for data to receive and the number of bytes to transfer (must be multiple of device endpoint maximum packet size for receive).\nIt also specifies \\ref USBH_packets with parameter \\em packet.\n\nThe function is non-blocking and returns as soon as the driver starts the operation on the specified pipe. During the operation it is not allowed to call this function again on the same pipe. 
Also the data buffer must stay allocated and the contents of data must not be modified.\n\nOperation is completed when the the requested number of data bytes have been transferred and is indicated with \\ref ARM_USBH_EVENT_TRANSFER_COMPLETE event.\nIt can also finish earlier on reception of different handshake tokens which are also indicated through \\ref USBH_pipe_events.\n \nTransfer operation can be aborted by calling \\ref ARM_USBH_PipeTransferAbort.\n*****************************************************************************************************************/", "url": "https://github.com/ARM-software/CMSIS_6/blob/1a1799c6c58c47e737bb19523483344a25d29895/CMSIS/Documentation/Doxygen/Driver/src/Driver_USBH.c#L399-L401", "sha": "1a1799c6c58c47e737bb19523483344a25d29895"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_TIM_IC_Stop_DMA", "code": "HAL_StatusTypeDef HAL_TIM_IC_Stop_DMA(TIM_HandleTypeDef *htim, uint32_t Channel)\n{\n HAL_StatusTypeDef status = HAL_OK;\n\n /* Check the parameters */\n assert_param(IS_TIM_CCX_INSTANCE(htim->Instance, Channel));\n assert_param(IS_TIM_DMA_CC_INSTANCE(htim->Instance));\n\n /* Disable the Input Capture channel */\n TIM_CCxChannelCmd(htim->Instance, Channel, TIM_CCx_DISABLE);\n\n switch (Channel)\n {\n case TIM_CHANNEL_1:\n {\n /* Disable the TIM Capture/Compare 1 DMA request */\n __HAL_TIM_DISABLE_DMA(htim, TIM_DMA_CC1);\n (void)HAL_DMA_Abort_IT(htim->hdma[TIM_DMA_ID_CC1]);\n break;\n }\n\n case TIM_CHANNEL_2:\n {\n /* Disable the TIM Capture/Compare 2 DMA request */\n __HAL_TIM_DISABLE_DMA(htim, TIM_DMA_CC2);\n (void)HAL_DMA_Abort_IT(htim->hdma[TIM_DMA_ID_CC2]);\n break;\n }\n\n case TIM_CHANNEL_3:\n {\n /* Disable the TIM Capture/Compare 3 DMA request */\n __HAL_TIM_DISABLE_DMA(htim, TIM_DMA_CC3);\n (void)HAL_DMA_Abort_IT(htim->hdma[TIM_DMA_ID_CC3]);\n break;\n }\n\n case TIM_CHANNEL_4:\n {\n /* Disable the TIM Capture/Compare 4 DMA request */\n __HAL_TIM_DISABLE_DMA(htim, TIM_DMA_CC4);\n (void)HAL_DMA_Abort_IT(htim->hdma[TIM_DMA_ID_CC4]);\n break;\n }\n\n default:\n status = HAL_ERROR;\n break;\n }\n\n if (status == HAL_OK)\n {\n /* Disable the Peripheral */\n __HAL_TIM_DISABLE(htim);\n\n /* Set the TIM channel state */\n TIM_CHANNEL_STATE_SET(htim, Channel, HAL_TIM_CHANNEL_STATE_READY);\n TIM_CHANNEL_N_STATE_SET(htim, Channel, HAL_TIM_CHANNEL_STATE_READY);\n }\n\n /* Return function status */\n return status;\n}", "docstring": "/**\n * @brief Stops the TIM Input Capture measurement in DMA mode.\n * @param htim TIM Input Capture handle\n * @param Channel TIM Channels to be disabled\n * This parameter can be one of the following values:\n * @arg TIM_CHANNEL_1: TIM Channel 1 selected\n * @arg TIM_CHANNEL_2: TIM Channel 2 selected\n * @arg TIM_CHANNEL_3: TIM Channel 3 selected\n * @arg TIM_CHANNEL_4: TIM Channel 4 selected\n * @retval HAL status\n */", "url": "https://github.com/ClusterM/fdskey/blob/f07ab82864a9f4a2e7b14b37f1837715edb0d0fe/FdsKey/Drivers/STM32G0xx_HAL_Driver/Src/stm32g0xx_hal_tim.c#L2542-L2604", "sha": "f07ab82864a9f4a2e7b14b37f1837715edb0d0fe"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_TIMEx_CommutCallback", "code": "__weak void HAL_TIMEx_CommutCallback(TIM_HandleTypeDef *htim)\n{\n /* Prevent unused argument(s) compilation warning */\n UNUSED(htim);\n\n /* NOTE : This function should not be modified, when the callback is needed,\n the HAL_TIMEx_CommutCallback could be implemented in the user file\n */\n}", "docstring": "/**\n * @}\n */\n/** 
@defgroup TIMEx_Exported_Functions_Group6 Extended Callbacks functions\n * @brief Extended Callbacks functions\n *\n@verbatim\n ==============================================================================\n ##### Extended Callbacks functions #####\n ==============================================================================\n [..]\n This section provides Extended TIM callback functions:\n (+) Timer Commutation callback\n (+) Timer Break callback\n\n@endverbatim\n * @{\n */\n/**\n * @brief Hall commutation changed callback in non-blocking mode\n * @param htim TIM handle\n * @retval None\n */", "url": "https://github.com/ClusterM/fdskey/blob/f07ab82864a9f4a2e7b14b37f1837715edb0d0fe/FdsKey/Drivers/STM32G0xx_HAL_Driver/Src/stm32g0xx_hal_tim_ex.c#L2606-L2614", "sha": "f07ab82864a9f4a2e7b14b37f1837715edb0d0fe"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "bn_read_le", "code": "void bn_read_le(const uint8_t *in_number, bignum256 *out_number)\n{\n\tint i;\n\tuint32_t temp = 0;\n\tfor (i = 0; i < 8; i++) {\n\t\t// invariant: temp = (in_number % 2^(32i)) >> 30i\n\t\t// get next limb = (in_number % 2^(32(i+1))) >> 32i\n\t\tuint32_t limb = read_le(in_number + i * 4);\n\t\t// temp = (in_number % 2^(32(i+1))) << 30i\n\t\ttemp |= limb << (2*i);\n\t\t// store 30 bits into val[i]\n\t\tout_number->val[i]= temp & 0x3FFFFFFF;\n\t\t// prepare temp for next round\n\t\ttemp = limb >> (30 - 2*i);\n\t}\n\tout_number->val[8] = temp;\n}", "docstring": "// convert a raw little endian 256 bit value into a normalized bignum.\n// out_number is partly reduced (since it fits in 256 bit).", "url": "https://github.com/lnbits/nostr-signing-device/blob/1956e5933b3da5e49d30db9b8597497b157a5cbc/libraries/uBitcoin/src/utility/trezor/bignum.c#L126-L142", "sha": "1956e5933b3da5e49d30db9b8597497b157a5cbc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ggml_compute_forward_sgn_f32", "code": "static void ggml_compute_forward_sgn_f32(\n const struct ggml_compute_params * params,\n struct ggml_tensor * dst) {\n\n const struct ggml_tensor * src0 = dst->src[0];\n\n if (params->ith != 0) {\n return;\n }\n\n assert(ggml_is_contiguous_1(src0));\n assert(ggml_is_contiguous_1(dst));\n assert(ggml_are_same_shape(src0, dst));\n\n const int n = ggml_nrows(src0);\n const int nc = src0->ne[0];\n\n for (int i = 0; i < n; i++) {\n ggml_vec_sgn_f32(nc,\n (float *) ((char *) dst->data + i*( dst->nb[1])),\n (float *) ((char *) src0->data + i*(src0->nb[1])));\n }\n}", "docstring": "// ggml_compute_forward_sgn", "url": "https://github.com/ggerganov/llama.cpp/blob/4078c77f9891831f29ffc7c315c8ec6695ba5ce7/ggml/src/ggml-cpu/ggml-cpu.c#L6205-L6227", "sha": "4078c77f9891831f29ffc7c315c8ec6695ba5ce7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ggml_compute_forward_soft_max_ext_back_f32", "code": "static void ggml_compute_forward_soft_max_ext_back_f32(\n const struct ggml_compute_params * params,\n struct ggml_tensor * dst) {\n\n const struct ggml_tensor * src0 = dst->src[0];\n const struct ggml_tensor * src1 = dst->src[1];\n\n GGML_ASSERT(ggml_is_contiguous(src0));\n GGML_ASSERT(ggml_is_contiguous(src1));\n GGML_ASSERT(ggml_is_contiguous(dst));\n GGML_ASSERT(ggml_are_same_shape(src0, dst));\n GGML_ASSERT(ggml_are_same_shape(src1, dst));\n\n float scale = 1.0f;\n float max_bias = 0.0f;\n\n memcpy(&scale, (const float *) dst->op_params + 0, sizeof(float));\n memcpy(&max_bias, (const float *) dst->op_params + 1, sizeof(float));\n\n 
GGML_ASSERT(max_bias == 0.0f);\n\n // TODO: handle transposed/permuted matrices\n\n const int ith = params->ith;\n const int nth = params->nth;\n\n const int nc = src0->ne[0];\n const int nr = ggml_nrows(src0);\n\n // rows per thread\n const int dr = (nr + nth - 1)/nth;\n\n // row range for this thread\n const int ir0 = dr*ith;\n const int ir1 = MIN(ir0 + dr, nr);\n\n for (int i1 = ir0; i1 < ir1; i1++) {\n float *dy = (float *)((char *) src0->data + i1*src0->nb[1]);\n float *y = (float *)((char *) src1->data + i1*src1->nb[1]);\n float *dx = (float *)((char *) dst->data + i1*dst->nb[1]);\n\n#ifndef NDEBUG\n for (int i = 0; i < nc; ++i) {\n //printf(\"p[%d] = %f\\n\", i, p[i]);\n assert(!isnan(dy[i]));\n assert(!isnan(y[i]));\n }\n#endif\n // Jii = yi - yi*yi\n // Jij = -yi*yj\n // J = diag(y)-y.T*y\n // dx = J * dy\n // dxk = sum_i(Jki * dyi)\n // dxk = sum_i(-yk*yi * dyi) - (-yk*yk)*dyk + (yk - yk*yk)*dyk\n // dxk = sum_i(-yk*yi * dyi) + yk*yk*dyk + yk*dyk - yk*yk*dyk\n // dxk = sum_i(-yk*yi * dyi) + yk*dyk\n // dxk = -yk * sum_i(yi * dyi) + yk*dyk\n // dxk = -yk * dot(y, dy) + yk*dyk\n // dxk = yk * (- dot(y, dy) + dyk)\n // dxk = yk * (dyk - dot(y, dy))\n //\n // post-order:\n // dot_y_dy := dot(y, dy)\n // dx := dy\n // dx := dx - dot_y_dy\n // dx := dx * y\n\n // linear runtime, no additional memory\n float dot_y_dy = 0;\n ggml_vec_dot_f32 (nc, &dot_y_dy, 0, y, 0, dy, 0, 1);\n ggml_vec_cpy_f32 (nc, dx, dy);\n ggml_vec_acc1_f32 (nc, dx, -dot_y_dy);\n ggml_vec_mul_f32 (nc, dx, dx, y);\n ggml_vec_scale_f32(nc, dx, scale);\n\n#ifndef NDEBUG\n for (int i = 0; i < nc; ++i) {\n assert(!isnan(dx[i]));\n assert(!isinf(dx[i]));\n }\n#endif\n }\n}", "docstring": "// ggml_compute_forward_soft_max_ext_back", "url": "https://github.com/ggerganov/llama.cpp/blob/4078c77f9891831f29ffc7c315c8ec6695ba5ce7/ggml/src/ggml-cpu/ggml-cpu.c#L8967-L9049", "sha": "4078c77f9891831f29ffc7c315c8ec6695ba5ce7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ggml_vec_set_i8", "code": "inline static void ggml_vec_set_i8(const int n, int8_t * x, const int8_t v) { for (int i = 0; i < n; ++i) x[i] = v; }", "docstring": "//\n// fundamental operations\n//", "url": "https://github.com/NouamaneTazi/bloomz.cpp/blob/9614897a272e69a64ca9e57102cd1f86b2d2e34b/Bloomer/bloomz/ggml.c#L1205-L1205", "sha": "9614897a272e69a64ca9e57102cd1f86b2d2e34b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ggml_v_silu", "code": "inline static __m512 ggml_v_silu(__m512 x) {\n const __m512 one = _mm512_set1_ps(1);\n const __m512 zero = _mm512_setzero_ps();\n const __m512 neg_x = _mm512_sub_ps(zero, x);\n const __m512 exp_neg_x = ggml_v_expf(neg_x);\n const __m512 one_plus_exp_neg_x = _mm512_add_ps(one, exp_neg_x);\n return _mm512_div_ps(x, one_plus_exp_neg_x);\n}", "docstring": "// computes silu x/(1+exp(-x)) in single precision vector", "url": "https://github.com/gtreshchev/RuntimeSpeechRecognizer/blob/4faa894a1ed82e8cf4013c4adebdc8fa130471d3/Source/ThirdParty/ggml/src/ggml.c#L2705-L2712", "sha": "4faa894a1ed82e8cf4013c4adebdc8fa130471d3"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "File_serve_client", "code": "static void File_serve_client(void *data, int f_write)\n{\n char *dpip_tag = NULL, *cmd = NULL, *url = NULL, *path;\n ClientInfo *client = data;\n int st;\n\n while (1) {\n _MSG(\"File_serve_client %p, flags=%d state=%d\\n\",\n client, client->flags, client->state);\n if (client->flags & (FILE_DONE | FILE_ERR))\n break;\n if 
(client->flags & FILE_READ) {\n dpip_tag = a_Dpip_dsh_read_token(client->sh, 0);\n _MSG(\"dpip_tag={%s}\\n\", dpip_tag);\n if (!dpip_tag)\n break;\n }\n\n if (client->flags & FILE_READ) {\n if (!(client->flags & FILE_AUTH_OK)) {\n /* Authenticate our client... */\n st = a_Dpip_check_auth(dpip_tag);\n _MSG(\"a_Dpip_check_auth returned %d\\n\", st);\n client->flags |= (st == 1) ? FILE_AUTH_OK : FILE_ERR;\n } else {\n /* Get file request */\n cmd = a_Dpip_get_attr(dpip_tag, \"cmd\");\n url = a_Dpip_get_attr(dpip_tag, \"url\");\n path = FileUtil_normalize_path(\"file\", url);\n if (cmd) {\n if (strcmp(cmd, \"DpiBye\") == 0) {\n DPIBYE = 1;\n MSG(\"(pid %d): Got DpiBye.\\n\", (int)getpid());\n client->flags |= FILE_DONE;\n } else if (url && dStrnAsciiCasecmp(url, \"dpi:\", 4) == 0 &&\n strcmp(url+4, \"/file/toggle\") == 0) {\n File_toggle_html_style(client);\n } else if (path) {\n File_get(client, path, url);\n } else {\n client->flags |= FILE_ERR;\n MSG(\"ERROR: URL was %s\\n\", url);\n }\n }\n dFree(path);\n dFree(url);\n dFree(cmd);\n dFree(dpip_tag);\n break;\n }\n dFree(dpip_tag);\n\n } else if (f_write) {\n /* send our answer */\n if (client->state == st_err)\n File_send_error_page(client);\n else if (client->d_dir)\n File_send_dir(client);\n else\n File_send_file(client);\n break;\n }\n } /*while*/\n\n client->flags |= (client->sh->status & DPIP_ERROR) ? FILE_ERR : 0;\n client->flags |= (client->sh->status & DPIP_EOF) ? FILE_DONE : 0;\n}", "docstring": "/*\n * Serve this client.\n */", "url": "https://github.com/crossbowerbt/dillo-plus/blob/7d093e6bddcb3338938ea5959844e62ff1f9b76f/dpi/file.c#L524-L590", "sha": "7d093e6bddcb3338938ea5959844e62ff1f9b76f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "wsp_ggml_compute_forward_clamp_f32", "code": "static void wsp_ggml_compute_forward_clamp_f32(\n const struct wsp_ggml_compute_params * params,\n struct wsp_ggml_tensor * dst) {\n\n const struct wsp_ggml_tensor * src0 = dst->src[0];\n\n if (params->ith != 0) {\n return;\n }\n\n float min;\n float max;\n memcpy(&min, (float *) dst->op_params + 0, sizeof(float));\n memcpy(&max, (float *) dst->op_params + 1, sizeof(float));\n\n const int ith = params->ith;\n const int nth = params->nth;\n\n const int n = wsp_ggml_nrows(src0);\n const int nc = src0->ne[0];\n\n const size_t nb00 = src0->nb[0];\n const size_t nb01 = src0->nb[1];\n\n const size_t nb0 = dst->nb[0];\n const size_t nb1 = dst->nb[1];\n\n WSP_GGML_ASSERT( nb0 == sizeof(float));\n WSP_GGML_ASSERT(nb00 == sizeof(float));\n\n for (int j = ith; j < n; j += nth) {\n float * dst_ptr = (float *) ((char *) dst->data + j*nb1);\n float * src0_ptr = (float *) ((char *) src0->data + j*nb01);\n\n for (int i = 0; i < nc; i++) {\n dst_ptr[i] = MAX(MIN(src0_ptr[i], max), min);\n }\n }\n}", "docstring": "// wsp_ggml_compute_forward_clamp", "url": "https://github.com/mybigday/whisper.rn/blob/844cbde318fe8962d0d378e0c093f3cb8000f9a0/cpp/ggml-cpu.c#L9057-L9095", "sha": "844cbde318fe8962d0d378e0c093f3cb8000f9a0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pbuf_memcmp", "code": "u16_t\npbuf_memcmp(const struct pbuf *p, u16_t offset, const void *s2, u16_t n)\n{\n u16_t start = offset;\n const struct pbuf *q = p;\n u16_t i;\n\n /* pbuf long enough to perform check? */\n if (p->tot_len < (offset + n)) {\n return 0xffff;\n }\n\n /* get the correct pbuf from chain. We know it succeeds because of p->tot_len check above. 
*/\n while ((q != NULL) && (q->len <= start)) {\n start = (u16_t)(start - q->len);\n q = q->next;\n }\n\n /* return requested data if pbuf is OK */\n for (i = 0; i < n; i++) {\n /* We know pbuf_get_at() succeeds because of p->tot_len check above. */\n u8_t a = pbuf_get_at(q, (u16_t)(start + i));\n u8_t b = ((const u8_t *)s2)[i];\n if (a != b) {\n return (u16_t)LWIP_MIN(i + 1, 0xFFFF);\n }\n }\n return 0;\n}", "docstring": "/**\n * @ingroup pbuf\n * Compare pbuf contents at specified offset with memory s2, both of length n\n *\n * @param p pbuf to compare\n * @param offset offset into p at which to start comparing\n * @param s2 buffer to compare\n * @param n length of buffer to compare\n * @return zero if equal, nonzero otherwise\n * (0xffff if p is too short, diffoffset+1 otherwise)\n */", "url": "https://github.com/nlzy/nsproxy/blob/9dd996380a4d1e8edaca359cbf32969f74d0286c/lwip/pbuf.c#L1433-L1461", "sha": "9dd996380a4d1e8edaca359cbf32969f74d0286c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ChunkCount", "code": "static int ChunkCount(const WebPDemuxer* const dmux, const char fourcc[4]) {\n const uint8_t* const mem_buf = dmux->mem_.buf_;\n const Chunk* c;\n int count = 0;\n for (c = dmux->chunks_; c != NULL; c = c->next_) {\n const uint8_t* const header = mem_buf + c->data_.offset_;\n if (!memcmp(header, fourcc, TAG_SIZE)) ++count;\n }\n return count;\n}", "docstring": "// -----------------------------------------------------------------------------\n// Chunk iteration", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/libraries/webp/src/demux/demux.c#L898-L907", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DoVerticalFilter_SSE2", "code": "static WEBP_INLINE void DoVerticalFilter_SSE2(const uint8_t* in,\n int width, int height, int stride,\n int row, int num_rows,\n uint8_t* out) {\n const size_t start_offset = row * stride;\n const int last_row = row + num_rows;\n DCHECK(in, out);\n in += start_offset;\n out += start_offset;\n\n if (row == 0) {\n // Very first top-left pixel is copied.\n out[0] = in[0];\n // Rest of top scan-line is left-predicted.\n PredictLineLeft_SSE2(in + 1, out + 1, width - 1);\n row = 1;\n in += stride;\n out += stride;\n }\n\n // Filter line-by-line.\n while (row < last_row) {\n PredictLineTop_SSE2(in, in - stride, out, width);\n ++row;\n in += stride;\n out += stride;\n }\n}", "docstring": "//------------------------------------------------------------------------------\n// Vertical filter.", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/libraries/webp/src/dsp/filters_sse2.c#L110-L137", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pthread_cond_destroy", "code": "static int pthread_cond_destroy(pthread_cond_t* const condition) {\n int ok = 1;\n#ifdef USE_WINDOWS_CONDITION_VARIABLE\n (void)condition;\n#else\n ok &= (CloseHandle(condition->waiting_sem_) != 0);\n ok &= (CloseHandle(condition->received_sem_) != 0);\n ok &= (CloseHandle(condition->signal_event_) != 0);\n#endif\n return !ok;\n}", "docstring": "// Condition", "url": "https://github.com/dpjudas/VkDoom/blob/bbaaa9a49db3e22e5c31787faaf790b46b8c87c4/libraries/webp/src/utils/thread_utils.c#L131-L141", "sha": "bbaaa9a49db3e22e5c31787faaf790b46b8c87c4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", 
"lang": "", "func_name": "vl_aib_merge_nodes", "code": "void\nvl_aib_merge_nodes (VlAIB * aib, vl_uint i, vl_uint j, vl_uint new)\n{\n vl_uint last_entry = aib->nentries - 1 ;\n vl_uint c, n ;\n\n /* clear the list of nodes to update */\n aib->nwhich = 0;\n\n /* make sure that i is smaller than j */\n if(i > j) { vl_uint tmp = j; j = i; i = tmp; }\n\n /* -----------------------------------------------------------------\n * Merge entries i and j, storing the result in i\n * -------------------------------------------------------------- */\n\n aib-> Px [i] += aib->Px[j] ;\n aib-> beta [i] = BETA_MAX ;\n aib-> nodes[i] = new ;\n\n for (c = 0; c < aib->nlabels; c++)\n aib-> Pcx [i*aib->nlabels + c] += aib-> Pcx [j*aib->nlabels + c] ;\n\n /* -----------------------------------------------------------------\n * Move last entry to j\n * -------------------------------------------------------------- */\n\n aib-> Px [j] = aib-> Px [last_entry];\n aib-> beta [j] = aib-> beta [last_entry];\n aib-> bidx [j] = aib-> bidx [last_entry];\n aib-> nodes [j] = aib-> nodes [last_entry];\n\n for (c = 0 ; c < aib->nlabels ; c++)\n aib-> Pcx[j*aib->nlabels + c] = aib-> Pcx [last_entry*aib->nlabels + c] ;\n\n /* delete last entry */\n aib-> nentries -- ;\n\n /* -----------------------------------------------------------------\n * Scan for entries to update\n * -------------------------------------------------------------- */\n\n /*\n * After mergin entries i and j, we need to update all other entries\n * that had one of these two as closest match. We also need to\n * update the renewend entry i. This is added by the loop below\n * since bidx [i] = j exactly because i was merged.\n *\n * Additionaly, since we moved the last entry back to the entry j,\n * we need to adjust the valeus of bidx to reflect this.\n */\n\n for (n = 0 ; n < aib->nentries; n++) {\n if(aib->bidx[n] == i || aib->bidx[n] == j) {\n aib->bidx [n] = 0;\n aib->beta [n] = BETA_MAX;\n aib->which [aib->nwhich++] = n ;\n }\n else if(aib->bidx[n] == last_entry) {\n aib->bidx[n] = j ;\n }\n }\n}", "docstring": "/** ------------------------------------------------------------------\n ** @internal\n ** @brief Merges two nodes i,j in the internal datastructure\n **\n ** @param aib A pointer to the internal data structure\n ** @param i The index of one member of the pair to merge\n ** @param j The index of the other member of the pair to merge\n ** @param new The index of the new node which corresponds to the union of\n ** (@a i, @a j).\n **\n ** Nodes are merged by replacing the entry @a i with the union of @c\n ** ij, moving the node stored in last position (called @c lastnode)\n ** back to jth position and the entry at the end.\n **\n ** After the nodes have been merged, it updates which nodes should be\n ** considered on the next iteration based on which beta values could\n ** potentially change. 
The merged node will always be part of this\n ** list.\n **/", "url": "https://github.com/json87/SphereSfM/blob/1a01a12be58af8f4f3a8245ea2a0018828336ee8/lib/VLFeat/aib.c#L271-L333", "sha": "1a01a12be58af8f4f3a8245ea2a0018828336ee8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "vl_dsift_delete", "code": "VL_EXPORT void\nvl_dsift_delete (VlDsiftFilter * self)\n{\n _vl_dsift_free_buffers (self) ;\n if (self->convTmp2) vl_free (self->convTmp2) ;\n if (self->convTmp1) vl_free (self->convTmp1) ;\n vl_free (self) ;\n}", "docstring": "/** ------------------------------------------------------------------\n ** @brief Delete DSIFT filter\n ** @param self DSIFT filter.\n **/", "url": "https://github.com/json87/SphereSfM/blob/1a01a12be58af8f4f3a8245ea2a0018828336ee8/lib/VLFeat/dsift.c#L483-L490", "sha": "1a01a12be58af8f4f3a8245ea2a0018828336ee8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "vl_svm_logistic_loss", "code": "double\nvl_svm_logistic_loss (double inner,double label)\n{\n double z = label * inner ;\n if (z >= 0) {\n return log(1.0 + exp(-z)) ;\n } else {\n return -z + log(exp(z) + 1.0) ;\n }\n}", "docstring": "/** @brief SVM logistic loss\n ** @copydetails VlSvmLossFunction */", "url": "https://github.com/json87/SphereSfM/blob/1a01a12be58af8f4f3a8245ea2a0018828336ee8/lib/VLFeat/svm.c#L1759-L1768", "sha": "1a01a12be58af8f4f3a8245ea2a0018828336ee8"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CheckDel", "code": "static void CheckDel(void* ptr, const char* k, size_t klen) {\n int* state = (int*) ptr;\n CheckCondition(*state == 2);\n CheckEqual(\"bar\", k, klen);\n (*state)++;\n}", "docstring": "// Callback from leveldb_writebatch_iterate()", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/classic-level/deps/leveldb/leveldb-1.20/db/c_test.c#L108-L113", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ggml_vec_set_i8", "code": "inline static void ggml_vec_set_i8(const int n, int8_t * x, const int8_t v) { for (int i = 0; i < n; ++i) x[i] = v; }", "docstring": "//\n// fundamental operations\n//", "url": "https://github.com/niw/AlpacaChat/blob/9b5600eb4624cb7739b6df03d3acde76cb7bd2ef/Sources/alpaca.cpp/ggml.c#L1205-L1205", "sha": "9b5600eb4624cb7739b6df03d3acde76cb7bd2ef"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Fado_ConstructStringVectors", "code": "void Fado_ConstructStringVectors(vc_vector** stringVectors, FairyFileInfo* fileInfo, int numFiles) {\n int currentFile;\n size_t currentSym;\n\n for (currentFile = 0; currentFile < numFiles; currentFile++) {\n FairySym* symtab = fileInfo[currentFile].symtabInfo.sectionData;\n\n stringVectors[currentFile] = vc_vector_create(0x40, sizeof(char**), NULL);\n\n /* Build a vector of pointers to defined symbols' names */\n for (currentSym = 0; currentSym < fileInfo[currentFile].symtabInfo.sectionEntryCount; currentSym++) {\n if ((symtab[currentSym].st_shndx != STN_UNDEF) &&\n Fado_CheckInProgBitsSections(symtab[currentSym].st_shndx, fileInfo[currentFile].progBitsSections)) {\n /* Have to pass a double pointer so it copies the pointer instead of the start of the string */\n char* stringPtr = &fileInfo[currentFile].strtab[symtab[currentSym].st_name];\n assert(vc_vector_push_back(stringVectors[currentFile], &stringPtr));\n }\n }\n 
}\n}", "docstring": "/**\n * For each input file, construct a vector of pointers to the starts of the strings defined in that file.\n */", "url": "https://github.com/zeldaret/af/blob/f013a028f42a1b11134bc5f22f28e9f48236b6f0/tools/fado/src/fado.c#L33-L52", "sha": "f013a028f42a1b11134bc5f22f28e9f48236b6f0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "contactSensor_zclReportCmd", "code": "static void contactSensor_zclReportCmd(u16 clusterId, zclReportCmd_t *pReportCmd)\n{\n printf(\"contactSensor_zclReportCmd\\n\");\n\n}", "docstring": "/*********************************************************************\n * @fn contactSensor_zclReportCmd\n *\n * @brief Handler for ZCL Report command.\n *\n * @param pInHdlrMsg - incoming message to process\n *\n * @return None\n */", "url": "https://github.com/doctor64/tuyaZigbee/blob/3a85776223ac4e689871b3b21852b9fc46325835/IASsensor/zcl_contactSensorCb.c#L248-L252", "sha": "3a85776223ac4e689871b3b21852b9fc46325835"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "tuyaLight_colorTimerStop", "code": "static void tuyaLight_colorTimerStop(void)\n{\n\tif(colorTimerEvt){\n\t\tTL_ZB_TIMER_CANCEL(&colorTimerEvt);\n\t}\n}", "docstring": "/*********************************************************************\n * @fn tuyaLight_colorTimerStop\n *\n * @brief\n *\n * @param None\n *\n * @return None\n */", "url": "https://github.com/doctor64/tuyaZigbee/blob/3a85776223ac4e689871b3b21852b9fc46325835/light/zcl_colorCtrlCb.c#L247-L252", "sha": "3a85776223ac4e689871b3b21852b9fc46325835"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "tuyaSwitch_zclStoreSceneRspCmdHandler", "code": "static void tuyaSwitch_zclStoreSceneRspCmdHandler(storeSceneRsp_t *pStoreSceneRsp)\n{\n\n}", "docstring": "/*********************************************************************\n * @fn tuyaSwitch_zclStoreSceneRspCmdHandler\n *\n * @brief Handler for ZCL store scene response command.\n *\n * @param pStoreSceneRsp\n *\n * @return None\n */", "url": "https://github.com/doctor64/tuyaZigbee/blob/3a85776223ac4e689871b3b21852b9fc46325835/switch/zcl_tuyaSwitchCb.c#L611-L614", "sha": "3a85776223ac4e689871b3b21852b9fc46325835"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pokemon_stat_calc", "code": "void pokemon_stat_calc(PokemonData* pdata, DataStat stat) {\n furi_assert(pdata);\n uint8_t iv;\n uint16_t ev;\n uint8_t base;\n uint8_t level;\n uint16_t calc;\n\n level = pokemon_stat_get(pdata, STAT_LEVEL, NONE);\n base = table_stat_base_get(\n pdata->pokemon_table, pokemon_stat_get(pdata, STAT_NUM, NONE), stat, NONE);\n ev = pokemon_stat_get(pdata, stat + STAT_EV_OFFS, NONE);\n iv = pokemon_stat_get(pdata, stat + STAT_IV_OFFS, NONE);\n\n /* Gen I and II calculation */\n // https://bulbapedia.bulbagarden.net/wiki/Stat#Generations_I_and_II\n calc = floor((((2 * (base + iv)) + floor(sqrt(ev) / 4)) * level) / 100);\n\n if(stat == STAT_HP)\n calc += (level + 10);\n else\n calc += 5;\n\n pokemon_stat_set(pdata, stat, NONE, calc);\n}", "docstring": "/* Calculates stat from current level */", "url": "https://github.com/kbembedded/Flipper-Zero-Game-Boy-Pokemon-Trading/blob/8769074c727ef3fa2da83e30daaf1aa964349c7e/src/pokemon_data.c#L855-L879", "sha": "8769074c727ef3fa2da83e30daaf1aa964349c7e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "rw_interrupt", "code": "static void rw_interrupt(void)\n{\n\tif (result() != 7 || (ST0 & 
0xf8) || (ST1 & 0xbf) || (ST2 & 0x73)) {\n\t\tif (ST1 & 0x02) {\n\t\t\tprintk(\"Drive %d is write protected\\n\\r\",current_drive);\n\t\t\tfloppy_deselect(current_drive);\n\t\t\tend_request(0);\n\t\t} else\n\t\t\tbad_flp_intr();\n\t\tdo_fd_request();\n\t\treturn;\n\t}\n\tif (command == FD_READ && (unsigned long)(CURRENT->buffer) >= 0x100000)\n\t\tcopy_buffer(tmp_floppy_area,CURRENT->buffer);\n\tfloppy_deselect(current_drive);\n\tend_request(1);\n\tdo_fd_request();\n}", "docstring": "/*\n * Ok, this interrupt is called after a DMA read/write has succeeded,\n * so we check the results, and copy any buffers.\n */", "url": "https://github.com/TonyWriting/Linux0.11/blob/7fecb719e8261e274d827dc43dc1424bd26cc483/kernel/blk_drv/floppy.c#L250-L267", "sha": "7fecb719e8261e274d827dc43dc1424bd26cc483"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "socket_wait", "code": "static int socket_wait(int fd, int is_read)\n{\n\tfd_set fds, *fdr = 0, *fdw = 0;\n\tstruct timeval tv;\n\tint ret;\n\ttv.tv_sec = 5; tv.tv_usec = 0; // 5 seconds time out\n\tFD_ZERO(&fds);\n\tFD_SET(fd, &fds);\n\tif (is_read) fdr = &fds;\n\telse fdw = &fds;\n\tret = select(fd+1, fdr, fdw, 0, &tv);\n#ifndef _WIN32\n\tif (ret == -1) perror(\"select\");\n#else\n\tif (ret == 0)\n\t\tfprintf(stderr, \"select time-out\\n\");\n\telse if (ret == SOCKET_ERROR)\n\t\tfprintf(stderr, \"select: %d\\n\", WSAGetLastError());\n#endif\n\treturn ret;\n}", "docstring": "/* In winsock.h, the type of a socket is SOCKET, which is: \"typedef\n * u_int SOCKET\". An invalid SOCKET is: \"(SOCKET)(~0)\", or signed\n * integer -1. In knetfile.c, I use \"int\" for socket type\n * throughout. This should be improved to avoid confusion.\n *\n * In Linux/Mac, recv() and read() do almost the same thing. You can see\n * in the header file that netread() is simply an alias of read(). In\n * Windows, however, they are different and using recv() is mandatory.\n */\n/* This function tests if the file handler is ready for reading (or\n * writing if is_read==0). */", "url": "https://github.com/w111liang222/lidar-slam-detection/blob/d57a923b3972d0a0bfdfc0016c32de53c26b9f9f/slam/thirdparty/fast_gicp/thirdparty/nvbio/contrib/htslib/knetfile.c#L60-L80", "sha": "d57a923b3972d0a0bfdfc0016c32de53c26b9f9f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ss_insertionsort", "code": "static\nvoid\nss_insertionsort(const unsigned char *T, const int *PA,\n int *first, int *last, int depth) {\n int *i, *j;\n int t;\n int r;\n\n for(i = last - 2; first <= i; --i) {\n for(t = *i, j = i + 1; 0 < (r = ss_compare(T, PA + t, PA + *j, depth));) {\n do { *(j - 1) = *j; } while((++j < last) && (*j < 0));\n if(last <= j) { break; }\n }\n if(r == 0) { *j = ~*j; }\n *(j - 1) = t;\n }\n}", "docstring": "/* Insertionsort for small size groups */", "url": "https://github.com/w111liang222/lidar-slam-detection/blob/d57a923b3972d0a0bfdfc0016c32de53c26b9f9f/slam/thirdparty/fast_gicp/thirdparty/nvbio/contrib/libdivsufsort-lite/divsufsort.c#L241-L257", "sha": "d57a923b3972d0a0bfdfc0016c32de53c26b9f9f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "operands_set_tsi", "code": "_INLINE_ void operands_set_tsi(_DInst* di, _Operand* op, _OperandType type, uint16_t size, unsigned int index)\n{\n\top->type = type;\n\top->index = (uint8_t)index;\n\top->size = size;\n\tdi->usedRegistersMask |= _REGISTERTORCLASS[index];\n}", "docstring": "/* A helper function to set operand's type, size and index. 
*/", "url": "https://github.com/bruhmoment21/cs2-sdk/blob/3fdb26b0eba5a7335f011c68bb5c7ef5a3171144/cs2-sdk/libs/distorm/src/operands.c#L51-L57", "sha": "3fdb26b0eba5a7335f011c68bb5c7ef5a3171144"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sys_task_exit", "code": "__noreturn static void sys_task_exit(void) {\n task_exit(EXP_GRACE_EXIT);\n}", "docstring": "// 実行中タスクを正常終了する。", "url": "https://github.com/nuta/microkernel-book/blob/8665ccb23eef1136f2bfcd4770e43a3388fba78a/kernel/syscall.c#L120-L122", "sha": "8665ccb23eef1136f2bfcd4770e43a3388fba78a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3TreeViewWindow", "code": "SQLITE_PRIVATE void sqlite3TreeViewWindow(TreeView* pView, const Window* pWin, u8 more) {\n int nElement = 0;\n if (pWin == 0)\n return;\n if (pWin->pFilter) {\n sqlite3TreeViewItem(pView, \"FILTER\", 1);\n sqlite3TreeViewExpr(pView, pWin->pFilter, 0);\n sqlite3TreeViewPop(&pView);\n }\n sqlite3TreeViewPush(&pView, more);\n if (pWin->zName) {\n sqlite3TreeViewLine(pView, \"OVER %s (%p)\", pWin->zName, pWin);\n } else {\n sqlite3TreeViewLine(pView, \"OVER (%p)\", pWin);\n }\n if (pWin->zBase)\n nElement++;\n if (pWin->pOrderBy)\n nElement++;\n if (pWin->eFrmType)\n nElement++;\n if (pWin->eExclude)\n nElement++;\n if (pWin->zBase) {\n sqlite3TreeViewPush(&pView, (--nElement) > 0);\n sqlite3TreeViewLine(pView, \"window: %s\", pWin->zBase);\n sqlite3TreeViewPop(&pView);\n }\n if (pWin->pPartition) {\n sqlite3TreeViewExprList(pView, pWin->pPartition, nElement > 0, \"PARTITION-BY\");\n }\n if (pWin->pOrderBy) {\n sqlite3TreeViewExprList(pView, pWin->pOrderBy, (--nElement) > 0, \"ORDER-BY\");\n }\n if (pWin->eFrmType) {\n char zBuf[30];\n const char* zFrmType = \"ROWS\";\n if (pWin->eFrmType == TK_RANGE)\n zFrmType = \"RANGE\";\n if (pWin->eFrmType == TK_GROUPS)\n zFrmType = \"GROUPS\";\n sqlite3_snprintf(sizeof(zBuf), zBuf, \"%s%s\", zFrmType, pWin->bImplicitFrame ? 
\" (implied)\" : \"\");\n sqlite3TreeViewItem(pView, zBuf, (--nElement) > 0);\n sqlite3TreeViewBound(pView, pWin->eStart, pWin->pStart, 1);\n sqlite3TreeViewBound(pView, pWin->eEnd, pWin->pEnd, 0);\n sqlite3TreeViewPop(&pView);\n }\n if (pWin->eExclude) {\n char zBuf[30];\n const char* zExclude;\n switch (pWin->eExclude) {\n case TK_NO:\n zExclude = \"NO OTHERS\";\n break;\n case TK_CURRENT:\n zExclude = \"CURRENT ROW\";\n break;\n case TK_GROUP:\n zExclude = \"GROUP\";\n break;\n case TK_TIES:\n zExclude = \"TIES\";\n break;\n default:\n sqlite3_snprintf(sizeof(zBuf), zBuf, \"invalid(%d)\", pWin->eExclude);\n zExclude = zBuf;\n break;\n }\n sqlite3TreeViewPush(&pView, 0);\n sqlite3TreeViewLine(pView, \"EXCLUDE %s\", zExclude);\n sqlite3TreeViewPop(&pView);\n }\n sqlite3TreeViewPop(&pView);\n}", "docstring": "/*\n** Generate a human-readable explanation for a Window object\n*/", "url": "https://github.com/margelo/react-native-nitro-sqlite/blob/09d9159c7d3aca803c69fda2b626868338acbb50/package/cpp/sqlite/sqlite3.c#L31036-L31109", "sha": "09d9159c7d3aca803c69fda2b626868338acbb50"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "winShmUnmap", "code": "static int winShmUnmap(sqlite3_file* fd, /* Database holding shared memory */\n int deleteFlag /* Delete after closing if true */\n) {\n winFile* pDbFd; /* Database holding shared-memory */\n winShm* p; /* The connection to be closed */\n winShmNode* pShmNode; /* The underlying shared-memory file */\n winShm** pp; /* For looping over sibling connections */\n\n pDbFd = (winFile*)fd;\n p = pDbFd->pShm;\n if (p == 0)\n return SQLITE_OK;\n pShmNode = p->pShmNode;\n\n /* Remove connection p from the set of connections associated\n ** with pShmNode */\n sqlite3_mutex_enter(pShmNode->mutex);\n for (pp = &pShmNode->pFirst; (*pp) != p; pp = &(*pp)->pNext) {\n }\n *pp = p->pNext;\n\n /* Free the connection p */\n sqlite3_free(p);\n pDbFd->pShm = 0;\n sqlite3_mutex_leave(pShmNode->mutex);\n\n /* If pShmNode->nRef has reached 0, then close the underlying\n ** shared-memory file, too */\n winShmEnterMutex();\n assert(pShmNode->nRef > 0);\n pShmNode->nRef--;\n if (pShmNode->nRef == 0) {\n winShmPurge(pDbFd->pVfs, deleteFlag);\n }\n winShmLeaveMutex();\n\n return SQLITE_OK;\n}", "docstring": "/*\n** Close a connection to shared-memory. Delete the underlying\n** storage if deleteFlag is true.\n*/", "url": "https://github.com/margelo/react-native-nitro-sqlite/blob/09d9159c7d3aca803c69fda2b626868338acbb50/package/cpp/sqlite/sqlite3.c#L47001-L47038", "sha": "09d9159c7d3aca803c69fda2b626868338acbb50"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "memdbDeviceCharacteristics", "code": "static int memdbDeviceCharacteristics(sqlite3_file* pFile) {\n UNUSED_PARAMETER(pFile);\n return SQLITE_IOCAP_ATOMIC | SQLITE_IOCAP_POWERSAFE_OVERWRITE | SQLITE_IOCAP_SAFE_APPEND | SQLITE_IOCAP_SEQUENTIAL;\n}", "docstring": "/*\n** Return the device characteristic flags supported by an memdb-file.\n*/", "url": "https://github.com/margelo/react-native-nitro-sqlite/blob/09d9159c7d3aca803c69fda2b626868338acbb50/package/cpp/sqlite/sqlite3.c#L49514-L49517", "sha": "09d9159c7d3aca803c69fda2b626868338acbb50"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3PagerSharedLock", "code": "SQLITE_PRIVATE int sqlite3PagerSharedLock(Pager* pPager) {\n int rc = SQLITE_OK; /* Return code */\n\n /* This routine is only called from b-tree and only when there are no\n ** outstanding pages. 
This implies that the pager state should either\n ** be OPEN or READER. READER is only possible if the pager is or was in\n ** exclusive access mode. */\n assert(sqlite3PcacheRefCount(pPager->pPCache) == 0);\n assert(assert_pager_state(pPager));\n assert(pPager->eState == PAGER_OPEN || pPager->eState == PAGER_READER);\n assert(pPager->errCode == SQLITE_OK);\n\n if (!pagerUseWal(pPager) && pPager->eState == PAGER_OPEN) {\n int bHotJournal = 1; /* True if there exists a hot journal-file */\n\n assert(!MEMDB);\n assert(pPager->tempFile == 0 || pPager->eLock == EXCLUSIVE_LOCK);\n\n rc = pager_wait_on_lock(pPager, SHARED_LOCK);\n if (rc != SQLITE_OK) {\n assert(pPager->eLock == NO_LOCK || pPager->eLock == UNKNOWN_LOCK);\n goto failed;\n }\n\n /* If a journal file exists, and there is no RESERVED lock on the\n ** database file, then it either needs to be played back or deleted.\n */\n if (pPager->eLock <= SHARED_LOCK) {\n rc = hasHotJournal(pPager, &bHotJournal);\n }\n if (rc != SQLITE_OK) {\n goto failed;\n }\n if (bHotJournal) {\n if (pPager->readOnly) {\n rc = SQLITE_READONLY_ROLLBACK;\n goto failed;\n }\n\n /* Get an EXCLUSIVE lock on the database file. At this point it is\n ** important that a RESERVED lock is not obtained on the way to the\n ** EXCLUSIVE lock. If it were, another process might open the\n ** database file, detect the RESERVED lock, and conclude that the\n ** database is safe to read while this process is still rolling the\n ** hot-journal back.\n **\n ** Because the intermediate RESERVED lock is not requested, any\n ** other process attempting to access the database file will get to\n ** this point in the code and fail to obtain its own EXCLUSIVE lock\n ** on the database file.\n **\n ** Unless the pager is in locking_mode=exclusive mode, the lock is\n ** downgraded to SHARED_LOCK before this function returns.\n */\n rc = pagerLockDb(pPager, EXCLUSIVE_LOCK);\n if (rc != SQLITE_OK) {\n goto failed;\n }\n\n /* If it is not already open and the file exists on disk, open the\n ** journal for read/write access. Write access is required because\n ** in exclusive-access mode the file descriptor will be kept open\n ** and possibly used for a transaction later on. Also, write-access\n ** is usually required to finalize the journal in journal_mode=persist\n ** mode (and also for journal_mode=truncate on some systems).\n **\n ** If the journal does not exist, it usually means that some\n ** other connection managed to get in and roll it back before\n ** this connection obtained the exclusive lock above. Or, it\n ** may mean that the pager was in the error-state when this\n ** function was called and the journal file does not exist.\n */\n if (!isOpen(pPager->jfd) && pPager->journalMode != PAGER_JOURNALMODE_OFF) {\n sqlite3_vfs* const pVfs = pPager->pVfs;\n int bExists; /* True if journal file exists */\n rc = sqlite3OsAccess(pVfs, pPager->zJournal, SQLITE_ACCESS_EXISTS, &bExists);\n if (rc == SQLITE_OK && bExists) {\n int fout = 0;\n int f = SQLITE_OPEN_READWRITE | SQLITE_OPEN_MAIN_JOURNAL;\n assert(!pPager->tempFile);\n rc = sqlite3OsOpen(pVfs, pPager->zJournal, pPager->jfd, f, &fout);\n assert(rc != SQLITE_OK || isOpen(pPager->jfd));\n if (rc == SQLITE_OK && fout & SQLITE_OPEN_READONLY) {\n rc = SQLITE_CANTOPEN_BKPT;\n sqlite3OsClose(pPager->jfd);\n }\n }\n }\n\n /* Playback and delete the journal. Drop the database write\n ** lock and reacquire the read lock. Purge the cache before\n ** playing back the hot-journal so that we don't end up with\n ** an inconsistent cache. 
Sync the hot journal before playing\n ** it back since the process that crashed and left the hot journal\n ** probably did not sync it and we are required to always sync\n ** the journal before playing it back.\n */\n if (isOpen(pPager->jfd)) {\n assert(rc == SQLITE_OK);\n rc = pagerSyncHotJournal(pPager);\n if (rc == SQLITE_OK) {\n rc = pager_playback(pPager, !pPager->tempFile);\n pPager->eState = PAGER_OPEN;\n }\n } else if (!pPager->exclusiveMode) {\n pagerUnlockDb(pPager, SHARED_LOCK);\n }\n\n if (rc != SQLITE_OK) {\n /* This branch is taken if an error occurs while trying to open\n ** or roll back a hot-journal while holding an EXCLUSIVE lock. The\n ** pager_unlock() routine will be called before returning to unlock\n ** the file. If the unlock attempt fails, then Pager.eLock must be\n ** set to UNKNOWN_LOCK (see the comment above the #define for\n ** UNKNOWN_LOCK above for an explanation).\n **\n ** In order to get pager_unlock() to do this, set Pager.eState to\n ** PAGER_ERROR now. This is not actually counted as a transition\n ** to ERROR state in the state diagram at the top of this file,\n ** since we know that the same call to pager_unlock() will very\n ** shortly transition the pager object to the OPEN state. Calling\n ** assert_pager_state() would fail now, as it should not be possible\n ** to be in ERROR state when there are zero outstanding page\n ** references.\n */\n pager_error(pPager, rc);\n goto failed;\n }\n\n assert(pPager->eState == PAGER_OPEN);\n assert((pPager->eLock == SHARED_LOCK) || (pPager->exclusiveMode && pPager->eLock > SHARED_LOCK));\n }\n\n if (!pPager->tempFile && pPager->hasHeldSharedLock) {\n /* The shared-lock has just been acquired then check to\n ** see if the database has been modified. If the database has changed,\n ** flush the cache. The hasHeldSharedLock flag prevents this from\n ** occurring on the very first access to a file, in order to save a\n ** single unnecessary sqlite3OsRead() call at the start-up.\n **\n ** Database changes are detected by looking at 15 bytes beginning\n ** at offset 24 into the file. The first 4 of these 16 bytes are\n ** a 32-bit counter that is incremented with each change. The\n ** other bytes change randomly with each file change when\n ** a codec is in use.\n **\n ** There is a vanishingly small chance that a change will not be\n ** detected. The chance of an undetected change is so small that\n ** it can be neglected.\n */\n char dbFileVers[sizeof(pPager->dbFileVers)];\n\n IOTRACE((\"CKVERS %p %d\\n\", pPager, sizeof(dbFileVers)));\n rc = sqlite3OsRead(pPager->fd, &dbFileVers, sizeof(dbFileVers), 24);\n if (rc != SQLITE_OK) {\n if (rc != SQLITE_IOERR_SHORT_READ) {\n goto failed;\n }\n memset(dbFileVers, 0, sizeof(dbFileVers));\n }\n\n if (memcmp(pPager->dbFileVers, dbFileVers, sizeof(dbFileVers)) != 0) {\n pager_reset(pPager);\n\n /* Unmap the database file. It is possible that external processes\n ** may have truncated the database file and then extended it back\n ** to its original size while this process was not holding a lock.\n ** In this case there may exist a Pager.pMap mapping that appears\n ** to be the right size but is not actually valid. Avoid this\n ** possibility by unmapping the db here. */\n if (USEFETCH(pPager)) {\n sqlite3OsUnfetch(pPager->fd, 0, 0);\n }\n }\n }\n\n /* If there is a WAL file in the file-system, open this database in WAL\n ** mode. 
Otherwise, the following function call is a no-op.\n */\n rc = pagerOpenWalIfPresent(pPager);\n#ifndef SQLITE_OMIT_WAL\n assert(pPager->pWal == 0 || rc == SQLITE_OK);\n#endif\n }\n\n if (pagerUseWal(pPager)) {\n assert(rc == SQLITE_OK);\n rc = pagerBeginReadTransaction(pPager);\n }\n\n if (pPager->tempFile == 0 && pPager->eState == PAGER_OPEN && rc == SQLITE_OK) {\n rc = pagerPagecount(pPager, &pPager->dbSize);\n }\n\nfailed:\n if (rc != SQLITE_OK) {\n assert(!MEMDB);\n pager_unlock(pPager);\n assert(pPager->eState == PAGER_OPEN);\n } else {\n pPager->eState = PAGER_READER;\n pPager->hasHeldSharedLock = 1;\n }\n return rc;\n}", "docstring": "/*\n** This function is called to obtain a shared lock on the database file.\n** It is illegal to call sqlite3PagerGet() until after this function\n** has been successfully called. If a shared-lock is already held when\n** this function is called, it is a no-op.\n**\n** The following operations are also performed by this function.\n**\n** 1) If the pager is currently in PAGER_OPEN state (no lock held\n** on the database file), then an attempt is made to obtain a\n** SHARED lock on the database file. Immediately after obtaining\n** the SHARED lock, the file-system is checked for a hot-journal,\n** which is played back if present. Following any hot-journal\n** rollback, the contents of the cache are validated by checking\n** the 'change-counter' field of the database file header and\n** discarded if they are found to be invalid.\n**\n** 2) If the pager is running in exclusive-mode, and there are currently\n** no outstanding references to any pages, and is in the error state,\n** then an attempt is made to clear the error state by discarding\n** the contents of the page cache and rolling back any open journal\n** file.\n**\n** If everything is successful, SQLITE_OK is returned. 
If an IO error\n** occurs while locking the database, checking for a hot-journal file or\n** rolling back a journal file, the IO error code is returned.\n*/", "url": "https://github.com/margelo/react-native-nitro-sqlite/blob/09d9159c7d3aca803c69fda2b626868338acbb50/package/cpp/sqlite/sqlite3.c#L58291-L58495", "sha": "09d9159c7d3aca803c69fda2b626868338acbb50"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3BtreeSeekCount", "code": "SQLITE_PRIVATE sqlite3_uint64 sqlite3BtreeSeekCount(Btree* pBt) {\n u64 n = pBt->nSeek;\n pBt->nSeek = 0;\n return n;\n}", "docstring": "/*\n** Return and reset the seek counter for a Btree object.\n*/", "url": "https://github.com/margelo/react-native-nitro-sqlite/blob/09d9159c7d3aca803c69fda2b626868338acbb50/package/cpp/sqlite/sqlite3.c#L66055-L66059", "sha": "09d9159c7d3aca803c69fda2b626868338acbb50"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3VdbeIOTraceSql", "code": "SQLITE_PRIVATE void sqlite3VdbeIOTraceSql(Vdbe* p) {\n int nOp = p->nOp;\n VdbeOp* pOp;\n if (sqlite3IoTrace == 0)\n return;\n if (nOp < 1)\n return;\n pOp = &p->aOp[0];\n if (pOp->opcode == OP_Init && pOp->p4.z != 0) {\n int i, j;\n char z[1000];\n sqlite3_snprintf(sizeof(z), z, \"%s\", pOp->p4.z);\n for (i = 0; sqlite3Isspace(z[i]); i++) {\n }\n for (j = 0; z[i]; i++) {\n if (sqlite3Isspace(z[i])) {\n if (z[i - 1] != ' ') {\n z[j++] = ' ';\n }\n } else {\n z[j++] = z[i];\n }\n }\n z[j] = 0;\n sqlite3IoTrace(\"SQL %s\\n\", z);\n }\n}", "docstring": "/*\n** Print an IOTRACE message showing SQL content.\n*/", "url": "https://github.com/margelo/react-native-nitro-sqlite/blob/09d9159c7d3aca803c69fda2b626868338acbb50/package/cpp/sqlite/sqlite3.c#L82084-L82110", "sha": "09d9159c7d3aca803c69fda2b626868338acbb50"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3_blob_bytes", "code": "SQLITE_API int sqlite3_blob_bytes(sqlite3_blob* pBlob) {\n Incrblob* p = (Incrblob*)pBlob;\n return (p && p->pStmt) ? 
p->nByte : 0;\n}", "docstring": "/*\n** Query a blob handle for the size of the data.\n**\n** The Incrblob.nByte field is fixed for the lifetime of the Incrblob\n** so no mutex is required for access.\n*/", "url": "https://github.com/margelo/react-native-nitro-sqlite/blob/09d9159c7d3aca803c69fda2b626868338acbb50/package/cpp/sqlite/sqlite3.c#L96615-L96618", "sha": "09d9159c7d3aca803c69fda2b626868338acbb50"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "jsonIs4Hex", "code": "static int jsonIs4Hex(const char* z) {\n int i;\n for (i = 0; i < 4; i++)\n if (!sqlite3Isxdigit(z[i]))\n return 0;\n return 1;\n}", "docstring": "/*\n** Return true if z[] begins with 4 (or more) hexadecimal digits\n*/", "url": "https://github.com/margelo/react-native-nitro-sqlite/blob/09d9159c7d3aca803c69fda2b626868338acbb50/package/cpp/sqlite/sqlite3.c#L194002-L194008", "sha": "09d9159c7d3aca803c69fda2b626868338acbb50"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "jsonEachColumn", "code": "static int jsonEachColumn(sqlite3_vtab_cursor* cur, /* The cursor */\n sqlite3_context* ctx, /* First argument to sqlite3_result_...() */\n int i /* Which column to return */\n) {\n JsonEachCursor* p = (JsonEachCursor*)cur;\n JsonNode* pThis = &p->sParse.aNode[p->i];\n switch (i) {\n case JEACH_KEY: {\n if (p->i == 0)\n break;\n if (p->eType == JSON_OBJECT) {\n jsonReturn(pThis, ctx, 0);\n } else if (p->eType == JSON_ARRAY) {\n u32 iKey;\n if (p->bRecursive) {\n if (p->iRowid == 0)\n break;\n assert(p->sParse.aNode[p->sParse.aUp[p->i]].eU == 3);\n iKey = p->sParse.aNode[p->sParse.aUp[p->i]].u.iKey;\n } else {\n iKey = p->iRowid;\n }\n sqlite3_result_int64(ctx, (sqlite3_int64)iKey);\n }\n break;\n }\n case JEACH_VALUE: {\n if (pThis->jnFlags & JNODE_LABEL)\n pThis++;\n jsonReturn(pThis, ctx, 0);\n break;\n }\n case JEACH_TYPE: {\n if (pThis->jnFlags & JNODE_LABEL)\n pThis++;\n sqlite3_result_text(ctx, jsonType[pThis->eType], -1, SQLITE_STATIC);\n break;\n }\n case JEACH_ATOM: {\n if (pThis->jnFlags & JNODE_LABEL)\n pThis++;\n if (pThis->eType >= JSON_ARRAY)\n break;\n jsonReturn(pThis, ctx, 0);\n break;\n }\n case JEACH_ID: {\n sqlite3_result_int64(ctx, (sqlite3_int64)p->i + ((pThis->jnFlags & JNODE_LABEL) != 0));\n break;\n }\n case JEACH_PARENT: {\n if (p->i > p->iBegin && p->bRecursive) {\n sqlite3_result_int64(ctx, (sqlite3_int64)p->sParse.aUp[p->i]);\n }\n break;\n }\n case JEACH_FULLKEY: {\n JsonString x;\n jsonInit(&x, ctx);\n if (p->bRecursive) {\n jsonEachComputePath(p, &x, p->i);\n } else {\n if (p->zRoot) {\n jsonAppendRaw(&x, p->zRoot, (int)strlen(p->zRoot));\n } else {\n jsonAppendChar(&x, '$');\n }\n if (p->eType == JSON_ARRAY) {\n jsonPrintf(30, &x, \"[%d]\", p->iRowid);\n } else if (p->eType == JSON_OBJECT) {\n jsonAppendObjectPathElement(&x, pThis);\n }\n }\n jsonResult(&x);\n break;\n }\n case JEACH_PATH: {\n if (p->bRecursive) {\n JsonString x;\n jsonInit(&x, ctx);\n jsonEachComputePath(p, &x, p->sParse.aUp[p->i]);\n jsonResult(&x);\n break;\n }\n /* For json_each() path and root are the same so fall through\n ** into the root case */\n /* no break */ deliberate_fall_through\n }\n default: {\n const char* zRoot = p->zRoot;\n if (zRoot == 0)\n zRoot = \"$\";\n sqlite3_result_text(ctx, zRoot, -1, SQLITE_STATIC);\n break;\n }\n case JEACH_JSON: {\n assert(i == JEACH_JSON);\n sqlite3_result_text(ctx, p->sParse.zJson, -1, SQLITE_STATIC);\n break;\n }\n }\n return SQLITE_OK;\n}", "docstring": "/* Return the value of a column */", "url": 
"https://github.com/margelo/react-native-nitro-sqlite/blob/09d9159c7d3aca803c69fda2b626868338acbb50/package/cpp/sqlite/sqlite3.c#L195566-L195668", "sha": "09d9159c7d3aca803c69fda2b626868338acbb50"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sessionDiffHooks", "code": "static void sessionDiffHooks(sqlite3_session* pSession, SessionDiffCtx* pDiffCtx) {\n pSession->hook.pCtx = (void*)pDiffCtx;\n pSession->hook.xOld = sessionDiffOld;\n pSession->hook.xNew = sessionDiffNew;\n pSession->hook.xCount = sessionDiffCount;\n pSession->hook.xDepth = sessionDiffDepth;\n}", "docstring": "/*\n** Install the diff hooks on the session object passed as the only\n** argument.\n*/", "url": "https://github.com/margelo/react-native-nitro-sqlite/blob/09d9159c7d3aca803c69fda2b626868338acbb50/package/cpp/sqlite/sqlite3.c#L211450-L211456", "sha": "09d9159c7d3aca803c69fda2b626868338acbb50"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_CAN_RxFifo1FullCallback", "code": "__weak void HAL_CAN_RxFifo1FullCallback(CAN_HandleTypeDef *hcan)\n{\n /* Prevent unused argument(s) compilation warning */\n UNUSED(hcan);\n\n /* NOTE : This function Should not be modified, when the callback is needed,\n the HAL_CAN_RxFifo1FullCallback could be implemented in the user\n file\n */\n}", "docstring": "/**\n * @brief Rx FIFO 1 full callback.\n * @param hcan pointer to a CAN_HandleTypeDef structure that contains\n * the configuration information for the specified CAN.\n * @retval None\n */", "url": "https://github.com/dalathegreat/Nissan-LEAF-Battery-Upgrade/blob/96cee856d7fb38b2e72736ddfccb8e07eb7bb594/Software/CANBRIDGE-2port/source/Drivers/STM32F1xx_HAL_Driver/Src/stm32f1xx_hal_can.c#L2259-L2268", "sha": "96cee856d7fb38b2e72736ddfccb8e07eb7bb594"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HAL_RCC_GetClockConfig", "code": "void HAL_RCC_GetClockConfig(RCC_ClkInitTypeDef *RCC_ClkInitStruct, uint32_t *pFLatency)\n{\n /* Check the parameters */\n assert_param(RCC_ClkInitStruct != NULL);\n assert_param(pFLatency != NULL);\n\n /* Set all possible values for the Clock type parameter --------------------*/\n RCC_ClkInitStruct->ClockType = RCC_CLOCKTYPE_SYSCLK | RCC_CLOCKTYPE_HCLK | RCC_CLOCKTYPE_PCLK1 | RCC_CLOCKTYPE_PCLK2;\n\n /* Get the SYSCLK configuration --------------------------------------------*/\n RCC_ClkInitStruct->SYSCLKSource = (uint32_t)(RCC->CFGR & RCC_CFGR_SW);\n\n /* Get the HCLK configuration ----------------------------------------------*/\n RCC_ClkInitStruct->AHBCLKDivider = (uint32_t)(RCC->CFGR & RCC_CFGR_HPRE);\n\n /* Get the APB1 configuration ----------------------------------------------*/\n RCC_ClkInitStruct->APB1CLKDivider = (uint32_t)(RCC->CFGR & RCC_CFGR_PPRE1);\n\n /* Get the APB2 configuration ----------------------------------------------*/\n RCC_ClkInitStruct->APB2CLKDivider = (uint32_t)((RCC->CFGR & RCC_CFGR_PPRE2) >> 3);\n\n#if defined(FLASH_ACR_LATENCY)\n /* Get the Flash Wait State (Latency) configuration ------------------------*/\n *pFLatency = (uint32_t)(FLASH->ACR & FLASH_ACR_LATENCY);\n#else\n /* For VALUE lines devices, only LATENCY_0 can be set*/\n *pFLatency = (uint32_t)FLASH_LATENCY_0;\n#endif\n}", "docstring": "/**\n * @brief Get the RCC_ClkInitStruct according to the internal\n * RCC configuration registers.\n * @param RCC_ClkInitStruct pointer to an RCC_ClkInitTypeDef structure that\n * contains the current clock configuration.\n * @param pFLatency Pointer on the Flash 
Latency.\n * @retval None\n */", "url": "https://github.com/dalathegreat/Nissan-LEAF-Battery-Upgrade/blob/96cee856d7fb38b2e72736ddfccb8e07eb7bb594/Software/CANBRIDGE-2port/source/Drivers/STM32F1xx_HAL_Driver/Src/stm32f1xx_hal_rcc.c#L1312-L1340", "sha": "96cee856d7fb38b2e72736ddfccb8e07eb7bb594"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "zipfileColumn", "code": "static int zipfileColumn(\n sqlite3_vtab_cursor *cur, /* The cursor */\n sqlite3_context *ctx, /* First argument to sqlite3_result_...() */\n int i /* Which column to return */\n){\n ZipfileCsr *pCsr = (ZipfileCsr*)cur;\n ZipfileCDS *pCDS = &pCsr->pCurrent->cds;\n int rc = SQLITE_OK;\n switch( i ){\n case 0: /* name */\n sqlite3_result_text(ctx, pCDS->zFile, -1, SQLITE_TRANSIENT);\n break;\n case 1: /* mode */\n /* TODO: Whether or not the following is correct surely depends on\n ** the platform on which the archive was created. */\n sqlite3_result_int(ctx, pCDS->iExternalAttr >> 16);\n break;\n case 2: { /* mtime */\n sqlite3_result_int64(ctx, pCsr->pCurrent->mUnixTime);\n break;\n }\n case 3: { /* sz */\n if( sqlite3_vtab_nochange(ctx)==0 ){\n sqlite3_result_int64(ctx, pCDS->szUncompressed);\n }\n break;\n }\n case 4: /* rawdata */\n if( sqlite3_vtab_nochange(ctx) ) break;\n case 5: { /* data */\n if( i==4 || pCDS->iCompression==0 || pCDS->iCompression==8 ){\n int sz = pCDS->szCompressed;\n int szFinal = pCDS->szUncompressed;\n if( szFinal>0 ){\n u8 *aBuf;\n u8 *aFree = 0;\n if( pCsr->pCurrent->aData ){\n aBuf = pCsr->pCurrent->aData;\n }else{\n aBuf = aFree = sqlite3_malloc64(sz);\n if( aBuf==0 ){\n rc = SQLITE_NOMEM;\n }else{\n FILE *pFile = pCsr->pFile;\n if( pFile==0 ){\n pFile = ((ZipfileTab*)(pCsr->base.pVtab))->pWriteFd;\n }\n rc = zipfileReadData(pFile, aBuf, sz, pCsr->pCurrent->iDataOff,\n &pCsr->base.pVtab->zErrMsg\n );\n }\n }\n if( rc==SQLITE_OK ){\n if( i==5 && pCDS->iCompression ){\n zipfileInflate(ctx, aBuf, sz, szFinal);\n }else{\n sqlite3_result_blob(ctx, aBuf, sz, SQLITE_TRANSIENT);\n }\n }\n sqlite3_free(aFree);\n }else{\n /* Figure out if this is a directory or a zero-sized file. Consider\n ** it to be a directory either if the mode suggests so, or if\n ** the final character in the name is '/'. */\n u32 mode = pCDS->iExternalAttr >> 16;\n if( !(mode & S_IFDIR) && pCDS->zFile[pCDS->nFile-1]!='/' ){\n sqlite3_result_blob(ctx, \"\", 0, SQLITE_STATIC);\n }\n }\n }\n break;\n }\n case 6: /* method */\n sqlite3_result_int(ctx, pCDS->iCompression);\n break;\n default: /* z */\n assert( i==7 );\n sqlite3_result_int64(ctx, pCsr->iId);\n break;\n }\n\n return rc;\n}", "docstring": "/*\n** Return values of columns for the row at which the series_cursor\n** is currently pointing.\n*/", "url": "https://github.com/plasma-umass/sqlwrite/blob/099aec4292676b58c179e4d6e11635e624fcd289/shell.c#L8711-L8793", "sha": "099aec4292676b58c179e4d6e11635e624fcd289"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "read32bits", "code": "static int read32bits(sqlite3_file *fd, i64 offset, u32 *pRes){\n unsigned char ac[4];\n int rc = sqlite3OsRead(fd, ac, sizeof(ac), offset);\n if( rc==SQLITE_OK ){\n *pRes = sqlite3Get4byte(ac);\n }\n return rc;\n}", "docstring": "/*\n** Read a 32-bit integer from the given file descriptor. Store the integer\n** that is read in *pRes. 
Return SQLITE_OK if everything worked, or an\n** error code if something goes wrong.\n**\n** All values are stored on disk as big-endian.\n*/", "url": "https://github.com/plasma-umass/sqlwrite/blob/099aec4292676b58c179e4d6e11635e624fcd289/sqlite3.c#L56589-L56596", "sha": "099aec4292676b58c179e4d6e11635e624fcd289"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3PagerCommitPhaseOne", "code": "SQLITE_PRIVATE int sqlite3PagerCommitPhaseOne(\n Pager *pPager, /* Pager object */\n const char *zSuper, /* If not NULL, the super-journal name */\n int noSync /* True to omit the xSync on the db file */\n){\n int rc = SQLITE_OK; /* Return code */\n\n assert( pPager->eState==PAGER_WRITER_LOCKED\n || pPager->eState==PAGER_WRITER_CACHEMOD\n || pPager->eState==PAGER_WRITER_DBMOD\n || pPager->eState==PAGER_ERROR\n );\n assert( assert_pager_state(pPager) );\n\n /* If a prior error occurred, report that error again. */\n if( NEVER(pPager->errCode) ) return pPager->errCode;\n\n /* Provide the ability to easily simulate an I/O error during testing */\n if( sqlite3FaultSim(400) ) return SQLITE_IOERR;\n\n PAGERTRACE((\"DATABASE SYNC: File=%s zSuper=%s nSize=%d\\n\",\n pPager->zFilename, zSuper, pPager->dbSize));\n\n /* If no database changes have been made, return early. */\n if( pPager->eState<PAGER_WRITER_CACHEMOD ) return SQLITE_OK;\n assert( MEMDB==0 || pPager->tempFile );\n assert( isOpen(pPager->fd) || pPager->tempFile );\n if( 0==pagerFlushOnCommit(pPager, 1) ){\n /* If this is an in-memory db, or no pages have been written to, or this\n ** function has already been called, it is mostly a no-op. However, any\n ** backup in progress needs to be restarted. */\n sqlite3BackupRestart(pPager->pBackup);\n }else{\n PgHdr *pList;\n if( pagerUseWal(pPager) ){\n PgHdr *pPageOne = 0;\n pList = sqlite3PcacheDirtyList(pPager->pPCache);\n if( pList==0 ){\n /* Must have at least one page for the WAL commit flag.\n ** Ticket [2d1a5c67dfc2363e44f29d9bbd57f] 2011-05-18 */\n rc = sqlite3PagerGet(pPager, 1, &pPageOne, 0);\n pList = pPageOne;\n pList->pDirty = 0;\n }\n assert( rc==SQLITE_OK );\n if( ALWAYS(pList) ){\n rc = pagerWalFrames(pPager, pList, pPager->dbSize, 1);\n }\n sqlite3PagerUnref(pPageOne);\n if( rc==SQLITE_OK ){\n sqlite3PcacheCleanAll(pPager->pPCache);\n }\n }else{\n /* The bBatch boolean is true if the batch-atomic-write commit method\n ** should be used. No rollback journal is created if batch-atomic-write\n ** is enabled.\n */\n#ifdef SQLITE_ENABLE_BATCH_ATOMIC_WRITE\n sqlite3_file *fd = pPager->fd;\n int bBatch = zSuper==0 /* An SQLITE_IOCAP_BATCH_ATOMIC commit */\n && (sqlite3OsDeviceCharacteristics(fd) & SQLITE_IOCAP_BATCH_ATOMIC)\n && !pPager->noSync\n && sqlite3JournalIsInMemory(pPager->jfd);\n#else\n# define bBatch 0\n#endif\n\n#ifdef SQLITE_ENABLE_ATOMIC_WRITE\n /* The following block updates the change-counter. Exactly how it\n ** does this depends on whether or not the atomic-update optimization\n ** was enabled at compile time, and if this transaction meets the\n ** runtime criteria to use the operation:\n **\n ** * The file-system supports the atomic-write property for\n ** blocks of size page-size, and\n ** * This commit is not part of a multi-file transaction, and\n ** * Exactly one page has been modified and stored in the journal file.\n **\n ** If the optimization was not enabled at compile time, then the\n ** pager_incr_changecounter() function is called to update the change\n ** counter in 'indirect-mode'. 
If the optimization is compiled in but\n ** is not applicable to this transaction, call sqlite3JournalCreate()\n ** to make sure the journal file has actually been created, then call\n ** pager_incr_changecounter() to update the change-counter in indirect\n ** mode.\n **\n ** Otherwise, if the optimization is both enabled and applicable,\n ** then call pager_incr_changecounter() to update the change-counter\n ** in 'direct' mode. In this case the journal file will never be\n ** created for this transaction.\n */\n if( bBatch==0 ){\n PgHdr *pPg;\n assert( isOpen(pPager->jfd)\n || pPager->journalMode==PAGER_JOURNALMODE_OFF\n || pPager->journalMode==PAGER_JOURNALMODE_WAL\n );\n if( !zSuper && isOpen(pPager->jfd)\n && pPager->journalOff==jrnlBufferSize(pPager)\n && pPager->dbSize>=pPager->dbOrigSize\n && (!(pPg = sqlite3PcacheDirtyList(pPager->pPCache)) || 0==pPg->pDirty)\n ){\n /* Update the db file change counter via the direct-write method. The\n ** following call will modify the in-memory representation of page 1\n ** to include the updated change counter and then write page 1\n ** directly to the database file. Because of the atomic-write\n ** property of the host file-system, this is safe.\n */\n rc = pager_incr_changecounter(pPager, 1);\n }else{\n rc = sqlite3JournalCreate(pPager->jfd);\n if( rc==SQLITE_OK ){\n rc = pager_incr_changecounter(pPager, 0);\n }\n }\n }\n#else /* SQLITE_ENABLE_ATOMIC_WRITE */\n#ifdef SQLITE_ENABLE_BATCH_ATOMIC_WRITE\n if( zSuper ){\n rc = sqlite3JournalCreate(pPager->jfd);\n if( rc!=SQLITE_OK ) goto commit_phase_one_exit;\n assert( bBatch==0 );\n }\n#endif\n rc = pager_incr_changecounter(pPager, 0);\n#endif /* !SQLITE_ENABLE_ATOMIC_WRITE */\n if( rc!=SQLITE_OK ) goto commit_phase_one_exit;\n\n /* Write the super-journal name into the journal file. If a\n ** super-journal file name has already been written to the journal file,\n ** or if zSuper is NULL (no super-journal), then this call is a no-op.\n */\n rc = writeSuperJournal(pPager, zSuper);\n if( rc!=SQLITE_OK ) goto commit_phase_one_exit;\n\n /* Sync the journal file and write all dirty pages to the database.\n ** If the atomic-update optimization is being used, this sync will not\n ** create the journal file or perform any real IO.\n **\n ** Because the change-counter page was just modified, unless the\n ** atomic-update optimization is used it is almost certain that the\n ** journal requires a sync here. However, in locking_mode=exclusive\n ** on a system under memory pressure it is just possible that this is\n ** not the case. 
In this case it is likely enough that the redundant\n ** xSync() call will be changed to a no-op by the OS anyhow.\n */\n rc = syncJournal(pPager, 0);\n if( rc!=SQLITE_OK ) goto commit_phase_one_exit;\n\n pList = sqlite3PcacheDirtyList(pPager->pPCache);\n#ifdef SQLITE_ENABLE_BATCH_ATOMIC_WRITE\n if( bBatch ){\n rc = sqlite3OsFileControl(fd, SQLITE_FCNTL_BEGIN_ATOMIC_WRITE, 0);\n if( rc==SQLITE_OK ){\n rc = pager_write_pagelist(pPager, pList);\n if( rc==SQLITE_OK ){\n rc = sqlite3OsFileControl(fd, SQLITE_FCNTL_COMMIT_ATOMIC_WRITE, 0);\n }\n if( rc!=SQLITE_OK ){\n sqlite3OsFileControlHint(fd, SQLITE_FCNTL_ROLLBACK_ATOMIC_WRITE, 0);\n }\n }\n\n if( (rc&0xFF)==SQLITE_IOERR && rc!=SQLITE_IOERR_NOMEM ){\n rc = sqlite3JournalCreate(pPager->jfd);\n if( rc!=SQLITE_OK ){\n sqlite3OsClose(pPager->jfd);\n goto commit_phase_one_exit;\n }\n bBatch = 0;\n }else{\n sqlite3OsClose(pPager->jfd);\n }\n }\n#endif /* SQLITE_ENABLE_BATCH_ATOMIC_WRITE */\n\n if( bBatch==0 ){\n rc = pager_write_pagelist(pPager, pList);\n }\n if( rc!=SQLITE_OK ){\n assert( rc!=SQLITE_IOERR_BLOCKED );\n goto commit_phase_one_exit;\n }\n sqlite3PcacheCleanAll(pPager->pPCache);\n\n /* If the file on disk is smaller than the database image, use\n ** pager_truncate to grow the file here. This can happen if the database\n ** image was extended as part of the current transaction and then the\n ** last page in the db image moved to the free-list. In this case the\n ** last page is never written out to disk, leaving the database file\n ** undersized. Fix this now if it is the case. */\n if( pPager->dbSize>pPager->dbFileSize ){\n Pgno nNew = pPager->dbSize - (pPager->dbSize==PAGER_SJ_PGNO(pPager));\n assert( pPager->eState==PAGER_WRITER_DBMOD );\n rc = pager_truncate(pPager, nNew);\n if( rc!=SQLITE_OK ) goto commit_phase_one_exit;\n }\n\n /* Finally, sync the database file. */\n if( !noSync ){\n rc = sqlite3PagerSync(pPager, zSuper);\n }\n IOTRACE((\"DBSYNC %p\\n\", pPager))\n }\n }\n\ncommit_phase_one_exit:\n if( rc==SQLITE_OK && !pagerUseWal(pPager) ){\n pPager->eState = PAGER_WRITER_FINISHED;\n }\n return rc;\n}", "docstring": "/*\n** Sync the database file for the pager pPager. zSuper points to the name\n** of a super-journal file that should be written into the individual\n** journal file. zSuper may be NULL, which is interpreted as no\n** super-journal (a single database transaction).\n**\n** This routine ensures that:\n**\n** * The database file change-counter is updated,\n** * the journal is synced (unless the atomic-write optimization is used),\n** * all dirty pages are written to the database file,\n** * the database file is truncated (if required), and\n** * the database file synced.\n**\n** The only thing that remains to commit the transaction is to finalize\n** (delete, truncate or zero the first part of) the journal file (or\n** delete the super-journal file if specified).\n**\n** Note that if zSuper==NULL, this does not overwrite a previous value\n** passed to an sqlite3PagerCommitPhaseOne() call.\n**\n** If the final parameter - noSync - is true, then the database file itself\n** is not synced. 
The caller must call sqlite3PagerSync() directly to\n** sync the database file before calling CommitPhaseTwo() to delete the\n** journal file in this case.\n*/", "url": "https://github.com/plasma-umass/sqlwrite/blob/099aec4292676b58c179e4d6e11635e624fcd289/sqlite3.c#L61867-L62079", "sha": "099aec4292676b58c179e4d6e11635e624fcd289"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sqlite3PagerWalFramesize", "code": "SQLITE_PRIVATE int sqlite3PagerWalFramesize(Pager *pPager){\n assert( pPager->eState>=PAGER_READER );\n return sqlite3WalFramesize(pPager->pWal);\n}", "docstring": "/*\n** A read-lock must be held on the pager when this function is called. If\n** the pager is in WAL mode and the WAL file currently contains one or more\n** frames, return the size in bytes of the page images stored within the\n** WAL frames. Otherwise, if this is not a WAL database or the WAL file\n** is empty, return 0.\n*/", "url": "https://github.com/plasma-umass/sqlwrite/blob/099aec4292676b58c179e4d6e11635e624fcd289/sqlite3.c#L63209-L63212", "sha": "099aec4292676b58c179e4d6e11635e624fcd289"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "geopolyCcwFunc", "code": "static void geopolyCcwFunc(\n sqlite3_context *context,\n int argc,\n sqlite3_value **argv\n){\n GeoPoly *p = geopolyFuncParam(context, argv[0], 0);\n (void)argc;\n if( p ){\n if( geopolyArea(p)<0.0 ){\n int ii, jj;\n for(ii=1, jj=p->nVertex-1; ii<jj; ii++, jj--){\n GeoCoord t = GeoX(p,ii);\n GeoX(p,ii) = GeoX(p,jj);\n GeoX(p,jj) = t;\n t = GeoY(p,ii);\n GeoY(p,ii) = GeoY(p,jj);\n GeoY(p,jj) = t;\n }\n }\n sqlite3_result_blob(context, p->hdr,\n 4+8*p->nVertex, SQLITE_TRANSIENT);\n sqlite3_free(p);\n }\n}", "docstring": "/*\n** Implementation of the geopoly_ccw(X) function.\n**\n** If the rotation of polygon X is clockwise (incorrect) instead of\n** counter-clockwise (the correct winding order according to RFC7946)\n** then reverse the order of the vertexes in polygon X.\n**\n** In other words, this routine returns a CCW polygon regardless of the\n** winding order of its input.\n**\n** Use this routine to sanitize historical inputs that that sometimes\n** contain polygons that wind in the wrong direction.\n*/", "url": "https://github.com/plasma-umass/sqlwrite/blob/099aec4292676b58c179e4d6e11635e624fcd289/sqlite3.c#L206454-L206477", "sha": "099aec4292676b58c179e4d6e11635e624fcd289"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "geopolyBBoxFunc", "code": "static void geopolyBBoxFunc(\n sqlite3_context *context,\n int argc,\n sqlite3_value **argv\n){\n GeoPoly *p = geopolyBBox(context, argv[0], 0, 0);\n (void)argc;\n if( p ){\n sqlite3_result_blob(context, p->hdr,\n 4+8*p->nVertex, SQLITE_TRANSIENT);\n sqlite3_free(p);\n }\n}", "docstring": "/*\n** Implementation of the geopoly_bbox(X) SQL function.\n*/", "url": "https://github.com/plasma-umass/sqlwrite/blob/099aec4292676b58c179e4d6e11635e624fcd289/sqlite3.c#L206619-L206631", "sha": "099aec4292676b58c179e4d6e11635e624fcd289"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fts5DecodeStructure", "code": "static void fts5DecodeStructure(\n int *pRc, /* IN/OUT: error code */\n Fts5Buffer *pBuf,\n const u8 *pBlob, int nBlob\n){\n int rc; /* Return code */\n Fts5Structure *p = 0; /* Decoded structure object */\n\n rc = fts5StructureDecode(pBlob, nBlob, 0, &p);\n if( rc!=SQLITE_OK ){\n *pRc = rc;\n return;\n }\n\n fts5DebugStructure(pRc, pBuf, p);\n fts5StructureRelease(p);\n}", "docstring": "/*\n** This is part of the fts5_decode() debugging aid.\n**\n** Arguments pBlob/nBlob contain a serialized Fts5Structure object. 
This\n** function appends a human-readable representation of the same object\n** to the buffer passed as the second argument.\n*/", "url": "https://github.com/plasma-umass/sqlwrite/blob/099aec4292676b58c179e4d6e11635e624fcd289/sqlite3.c#L237040-L237056", "sha": "099aec4292676b58c179e4d6e11635e624fcd289"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "__Pyx_pretend_to_initialize", "code": "static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }", "docstring": "/* __GNUC__ */", "url": "https://github.com/JunityZhan/Understanding-VITS/blob/908a46a2af87606c3039a402cc7af3984ef8aafd/monotonic_align/core.c#L817-L817", "sha": "908a46a2af87606c3039a402cc7af3984ef8aafd"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "switchclientdesktop", "code": "void\nswitchclientdesktop(state_t* s, client_t* cl, int32_t desktop) {\n char** names = (char**)malloc(s->monfocus->desktopcount * sizeof(char*));\n for (size_t i = 0; i < s->monfocus->desktopcount ; i++) {\n names[i] = strdup(s->monfocus->activedesktops[i].name);\n }\n // Create the desktop if it was not created yet\n if(!strinarr(names, s->monfocus->desktopcount, s->config.desktopnames[desktop])) {\n createdesktop(s, desktop, s->monfocus);\n }\n free(names);\n\n cl->desktop = desktop;\n if(cl == s->focus) {\n unfocusclient(s, cl);\n }\n hideclient(s, cl);\n makelayout(s, s->monfocus);\n}", "docstring": "/**\n * @brief Switches the desktop of a given client and hides that client.\n *\n * @param s The window manager's state\n * @param cl The client to switch the desktop of\n * @param desktop The desktop to set the client of \n */", "url": "https://github.com/cococry/ragnar/blob/8b95f82411d8956a29fb556cc6a9ab721492e985/src/ragnar.c#L1283-L1301", "sha": "8b95f82411d8956a29fb556cc6a9ab721492e985"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "makeMaps_e", "code": "static\nvoid makeMaps_e ( EState* s )\n{\n Int32 i;\n s->nInUse = 0;\n for (i = 0; i < 256; i++)\n if (s->inUse[i]) {\n s->unseqToSeq[i] = s->nInUse;\n s->nInUse++;\n }\n}", "docstring": "/*---------------------------------------------------*/\n/*--- The back end proper ---*/\n/*---------------------------------------------------*/\n/*---------------------------------------------------*/", "url": "https://github.com/2004Scape/Server/blob/408c538428f8fb0cd7fd387ebece0a8c9cbcac57/src/3rdparty/bzip2-wasm/bzip2-1.0.8/compress.c#L105-L115", "sha": "408c538428f8fb0cd7fd387ebece0a8c9cbcac57"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ggml_compute_forward_conv_transpose_1d_f16_f32", "code": "static void ggml_compute_forward_conv_transpose_1d_f16_f32(\n const struct ggml_compute_params * params,\n const struct ggml_tensor * src0,\n const struct ggml_tensor * src1,\n struct ggml_tensor * dst) {\n GGML_ASSERT(src0->type == GGML_TYPE_F16);\n GGML_ASSERT(src1->type == GGML_TYPE_F32);\n GGML_ASSERT( dst->type == GGML_TYPE_F32);\n\n int64_t t0 = ggml_perf_time_us();\n UNUSED(t0);\n\n GGML_TENSOR_BINARY_OP_LOCALS\n\n const int ith = params->ith;\n const int nth = params->nth;\n\n const int nk = ne00*ne01*ne02;\n\n GGML_ASSERT(nb00 == sizeof(ggml_fp16_t));\n GGML_ASSERT(nb10 == sizeof(float));\n\n if (params->type == GGML_TASK_INIT) {\n memset(params->wdata, 0, params->wsize);\n\n // permute kernel data (src0) from (K x Cout x Cin) to (Cin x K x Cout)\n {\n ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + 0;\n\n for (int64_t i02 = 
0; i02 < ne02; i02++) {\n for (int64_t i01 = 0; i01 < ne01; i01++) {\n const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i02*nb02 + i01*nb01);\n ggml_fp16_t * dst_data = wdata + i01*ne00*ne02;\n for (int64_t i00 = 0; i00 < ne00; i00++) {\n dst_data[i00*ne02 + i02] = src[i00];\n }\n }\n }\n }\n\n // permute source data (src1) from (L x Cin) to (Cin x L)\n {\n ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + nk;\n ggml_fp16_t * dst_data = wdata;\n\n for (int64_t i11 = 0; i11 < ne11; i11++) {\n const float * const src = (float *)((char *) src1->data + i11*nb11);\n for (int64_t i10 = 0; i10 < ne10; i10++) {\n dst_data[i10*ne11 + i11] = GGML_FP32_TO_FP16(src[i10]);\n }\n }\n }\n\n // need to zero dst since we are accumulating into it\n memset(dst->data, 0, ggml_nbytes(dst));\n\n return;\n }\n\n if (params->type == GGML_TASK_FINALIZE) {\n return;\n }\n\n const int32_t s0 = ((const int32_t*)(dst->op_params))[0];\n\n // total rows in dst\n const int nr = ne1;\n\n // rows per thread\n const int dr = (nr + nth - 1)/nth;\n\n // row range for this thread\n const int ir0 = dr*ith;\n const int ir1 = MIN(ir0 + dr, nr);\n\n ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + 0;\n ggml_fp16_t * const wdata_src = wdata + nk;\n\n for (int i1 = ir0; i1 < ir1; i1++) {\n float * dst_data = (float *)((char *) dst->data + i1*nb1);\n ggml_fp16_t * wdata_kernel = wdata + i1*ne02*ne00;\n for (int i10 = 0; i10 < ne10; i10++) {\n const int i1n = i10*ne11;\n for (int i00 = 0; i00 < ne00; i00++) {\n float v = 0;\n ggml_vec_dot_f16(ne02, &v,\n (ggml_fp16_t *) wdata_src + i1n,\n (ggml_fp16_t *) wdata_kernel + i00*ne02);\n dst_data[i10*s0 + i00] += v;\n }\n }\n }\n}", "docstring": "// ggml_compute_forward_conv_transpose_1d", "url": "https://github.com/yeyupiaoling/Whisper-Finetune/blob/e763980251b3307e6518ba2a352f56192bc1c43b/AndroidDemo/app/src/main/jni/whisper/libwhisper/ggml.c#L11502-L11594", "sha": "e763980251b3307e6518ba2a352f56192bc1c43b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "defaultLoading", "code": "function defaultLoading(api, opts) {\n opts = opts || {};\n defaults(opts, {\n text: 'loading',\n textColor: '#000',\n fontSize: 12,\n fontWeight: 'normal',\n fontStyle: 'normal',\n fontFamily: 'sans-serif',\n maskColor: 'rgba(255, 255, 255, 0.8)',\n showSpinner: true,\n color: '#5470c6',\n spinnerRadius: 10,\n lineWidth: 5,\n zlevel: 0\n });\n var group = new Group();\n var mask = new Rect({\n style: {\n fill: opts.maskColor\n },\n zlevel: opts.zlevel,\n z: 10000\n });\n group.add(mask);\n var textContent = new ZRText({\n style: {\n text: opts.text,\n fill: opts.textColor,\n fontSize: opts.fontSize,\n fontWeight: opts.fontWeight,\n fontStyle: opts.fontStyle,\n fontFamily: opts.fontFamily\n },\n zlevel: opts.zlevel,\n z: 10001\n });\n var labelRect = new Rect({\n style: {\n fill: 'none'\n },\n textContent: textContent,\n textConfig: {\n position: 'right',\n distance: 10\n },\n zlevel: opts.zlevel,\n z: 10001\n });\n group.add(labelRect);\n var arc;\n\n if (opts.showSpinner) {\n arc = new Arc({\n shape: {\n startAngle: -PI$3 / 2,\n endAngle: -PI$3 / 2 + 0.1,\n r: opts.spinnerRadius\n },\n style: {\n stroke: opts.color,\n lineCap: 'round',\n lineWidth: opts.lineWidth\n },\n zlevel: opts.zlevel,\n z: 10001\n });\n arc.animateShape(true).when(1000, {\n endAngle: PI$3 * 3 / 2\n }).start('circularInOut');\n arc.animateShape(true).when(1000, {\n startAngle: PI$3 * 3 / 2\n }).delay(300).start('circularInOut');\n group.add(arc);\n } // Inject 
resize\n\n\n group.resize = function () {\n var textWidth = textContent.getBoundingRect().width;\n var r = opts.showSpinner ? opts.spinnerRadius : 0; // cx = (containerWidth - arcDiameter - textDistance - textWidth) / 2\n // textDistance needs to be calculated when both animation and text exist\n\n var cx = (api.getWidth() - r * 2 - (opts.showSpinner && textWidth ? 10 : 0) - textWidth) / 2 - (opts.showSpinner && textWidth ? 0 : 5 + textWidth / 2) // only show the text\n + (opts.showSpinner ? 0 : textWidth / 2) // only show the spinner\n + (textWidth ? 0 : r);\n var cy = api.getHeight() / 2;\n opts.showSpinner && arc.setShape({\n cx: cx,\n cy: cy\n });\n labelRect.setShape({\n x: cx - r,\n y: cy - r,\n width: r * 2,\n height: r * 2\n });\n mask.setShape({\n x: 0,\n y: 0,\n width: api.getWidth(),\n height: api.getHeight()\n });\n };\n\n group.resize();\n return group;\n }", "docstring": "/**\n * @param {module:echarts/ExtensionAPI} api\n * @param {Object} [opts]\n * @param {string} [opts.text]\n * @param {string} [opts.color]\n * @param {string} [opts.textColor]\n * @return {module:zrender/Element}\n */", "url": "https://github.com/Allenkuzma/langhaiblogs/blob/a77bd2103800a65f11812b04afb506aeea9c9eae/src/main/resources/static/echarts/echarts.js#L25325-L25431", "sha": "a77bd2103800a65f11812b04afb506aeea9c9eae"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "radialCoordinate", "code": "function radialCoordinate(rad, r) {\n rad -= Math.PI / 2;\n return {\n x: r * Math.cos(rad),\n y: r * Math.sin(rad)\n };\n }", "docstring": "/**\n * Transform the common coordinate to radial coordinate.\n */", "url": "https://github.com/Allenkuzma/langhaiblogs/blob/a77bd2103800a65f11812b04afb506aeea9c9eae/src/main/resources/static/pear/component/pear/module/echarts.js#L54204-L54210", "sha": "a77bd2103800a65f11812b04afb506aeea9c9eae"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "shouldUseClickEvent", "code": "function shouldUseClickEvent(elem) {\n // Use the `click` event to detect changes to checkbox and radio inputs.\n // This approach works across all browsers, whereas `change` does not fire\n // until `blur` in IE8.\n var nodeName = elem.nodeName;\n return nodeName && nodeName.toLowerCase() === 'input' && (elem.type === 'checkbox' || elem.type === 'radio');\n }", "docstring": "/**\n * SECTION: handle `click` event\n */", "url": "https://github.com/toniebox-reverse-engineering/teddycloud/blob/83d3b29cbfc74e8f76f48f8782646c8e464055b2/contrib/data/www/library/react-dom.development.js#L9350-L9356", "sha": "83d3b29cbfc74e8f76f48f8782646c8e464055b2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Component", "code": "function Component(props, context, updater) {\n this.props = props;\n this.context = context; // If a component has string refs, we will assign a different object later.\n\n this.refs = emptyObject; // We initialize the default updater but the real one gets injected by the\n // renderer.\n\n this.updater = updater || ReactNoopUpdateQueue;\n }", "docstring": "/**\n * Base class helpers for the updating state of a component.\n */", "url": "https://github.com/toniebox-reverse-engineering/teddycloud/blob/83d3b29cbfc74e8f76f48f8782646c8e464055b2/contrib/data/www/library/react.development.js#L513-L521", "sha": "83d3b29cbfc74e8f76f48f8782646c8e464055b2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Var", "code": "function Var( id ) {\n\t\tthis.id = 
id;\n\t\tthis.ground = false;\n\t}", "docstring": "// PROLOG OBJECTS", "url": "https://github.com/toblotron/praxis-ide/blob/b4f732c28f0280ead015094bce893a378a28bec2/lib/tau-prolog.0.3.4.js#L1619-L1622", "sha": "b4f732c28f0280ead015094bce893a378a28bec2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "buildTokenBadBidi", "code": "function buildTokenBadBidi(inner, order) {\n return function (builder, text, style, startStyle, endStyle, title, css) {\n style = style ? style + \" cm-force-border\" : \"cm-force-border\"\n var start = builder.pos, end = start + text.length\n for (;;) {\n // Find the part that overlaps with the start of this text\n var part = (void 0)\n for (var i = 0; i < order.length; i++) {\n part = order[i]\n if (part.to > start && part.from <= start) { break }\n }\n if (part.to >= end) { return inner(builder, text, style, startStyle, endStyle, title, css) }\n inner(builder, text.slice(0, part.to - start), style, startStyle, null, title, css)\n startStyle = null\n text = text.slice(part.to - start)\n start = part.to\n }\n }\n}", "docstring": "// Work around nonsense dimensions being reported for stretches of", "url": "https://github.com/toblotron/praxis-ide/blob/b4f732c28f0280ead015094bce893a378a28bec2/lib/codemirror/lib/codemirror.js#L1949-L1967", "sha": "b4f732c28f0280ead015094bce893a378a28bec2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Fancytree", "code": "function Fancytree(widget) {\n\t\tthis.widget = widget;\n\t\tthis.$div = widget.element;\n\t\tthis.options = widget.options;\n\t\tif (this.options) {\n\t\t\tif (this.options.lazyload !== undefined) {\n\t\t\t\t$.error(\n\t\t\t\t\t\"The 'lazyload' event is deprecated since 2014-02-25. Use 'lazyLoad' (with uppercase L) instead.\"\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (this.options.loaderror !== undefined) {\n\t\t\t\t$.error(\n\t\t\t\t\t\"The 'loaderror' event was renamed since 2014-07-03. Use 'loadError' (with uppercase E) instead.\"\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (this.options.fx !== undefined) {\n\t\t\t\t$.error(\n\t\t\t\t\t\"The 'fx' option was replaced by 'toggleEffect' since 2014-11-30.\"\n\t\t\t\t);\n\t\t\t}\n\t\t\tif (this.options.removeNode !== undefined) {\n\t\t\t\t$.error(\n\t\t\t\t\t\"The 'removeNode' event was replaced by 'modifyChild' since 2.20 (2016-09-10).\"\n\t\t\t\t);\n\t\t\t}\n\t\t}\n\t\tthis.ext = {}; // Active extension instances\n\t\tthis.types = {};\n\t\tthis.columns = {};\n\t\t// allow to init tree.data.foo from
<div data-foo=''>\n\t\tthis.data = _getElementDataAsDict(this.$div);\n\t\t// TODO: use widget.uuid instead?\n\t\tthis._id = \"\" + (this.options.treeId || $.ui.fancytree._nextId++);\n\t\t// TODO: use widget.eventNamespace instead?\n\t\tthis._ns = \".fancytree-\" + this._id; // append for namespaced events\n\t\tthis.activeNode = null;\n\t\tthis.focusNode = null;\n\t\tthis._hasFocus = null;\n\t\tthis._tempCache = {};\n\t\tthis._lastMousedownNode = null;\n\t\tthis._enableUpdate = true;\n\t\tthis.lastSelectedNode = null;\n\t\tthis.systemFocusElement = null;\n\t\tthis.lastQuicksearchTerm = \"\";\n\t\tthis.lastQuicksearchTime = 0;\n\t\tthis.viewport = null; // ext-grid\n\n\t\tthis.statusClassPropName = \"span\";\n\t\tthis.ariaPropName = \"li\";\n\t\tthis.nodeContainerAttrName = \"li\";\n\n\t\t// Remove previous markup if any\n\t\tthis.$div.find(\">ul.fancytree-container\").remove();\n\n\t\t// Create a node without parent.\n\t\tvar fakeParent = { tree: this },\n\t\t\t$ul;\n\t\tthis.rootNode = new FancytreeNode(fakeParent, {\n\t\t\ttitle: \"root\",\n\t\t\tkey: \"root_\" + this._id,\n\t\t\tchildren: null,\n\t\t\texpanded: true,\n\t\t});\n\t\tthis.rootNode.parent = null;\n\n\t\t// Create root markup\n\t\t$ul = $(\"<ul>
    \", {\n\t\t\tid: \"ft-id-\" + this._id,\n\t\t\tclass: \"ui-fancytree fancytree-container fancytree-plain\",\n\t\t}).appendTo(this.$div);\n\t\tthis.$container = $ul;\n\t\tthis.rootNode.ul = $ul[0];\n\n\t\tif (this.options.debugLevel == null) {\n\t\t\tthis.options.debugLevel = FT.debugLevel;\n\t\t}\n\t\t// // Add container to the TAB chain\n\t\t// // See http://www.w3.org/TR/wai-aria-practices/#focus_activedescendant\n\t\t// // #577: Allow to set tabindex to \"0\", \"-1\" and \"\"\n\t\t// this.$container.attr(\"tabindex\", this.options.tabindex);\n\n\t\t// if( this.options.rtl ) {\n\t\t// \tthis.$container.attr(\"DIR\", \"RTL\").addClass(\"fancytree-rtl\");\n\t\t// // }else{\n\t\t// //\tthis.$container.attr(\"DIR\", null).removeClass(\"fancytree-rtl\");\n\t\t// }\n\t\t// if(this.options.aria){\n\t\t// \tthis.$container.attr(\"role\", \"tree\");\n\t\t// \tif( this.options.selectMode !== 1 ) {\n\t\t// \t\tthis.$container.attr(\"aria-multiselectable\", true);\n\t\t// \t}\n\t\t// }\n\t}", "docstring": "/******************************************************************************\n\t * Fancytree\n\t */", "url": "https://github.com/toblotron/praxis-ide/blob/b4f732c28f0280ead015094bce893a378a28bec2/lib/fancytree_dist/jquery.fancytree-all.js#L2632-L2724", "sha": "b4f732c28f0280ead015094bce893a378a28bec2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "stretchSpansOverChange", "code": "function stretchSpansOverChange(doc, change) {\n if (change.full) { return null }\n var oldFirst = isLine(doc, change.from.line) && getLine(doc, change.from.line).markedSpans;\n var oldLast = isLine(doc, change.to.line) && getLine(doc, change.to.line).markedSpans;\n if (!oldFirst && !oldLast) { return null }\n\n var startCh = change.from.ch, endCh = change.to.ch, isInsert = cmp(change.from, change.to) == 0;\n // Get the spans that 'stick out' on both sides\n var first = markedSpansBefore(oldFirst, startCh, isInsert);\n var last = markedSpansAfter(oldLast, endCh, isInsert);\n\n // Next, merge those two ends\n var sameLine = change.text.length == 1, offset = lst(change.text).length + (sameLine ? startCh : 0);\n if (first) {\n // Fix up .to properties of first\n for (var i = 0; i < first.length; ++i) {\n var span = first[i];\n if (span.to == null) {\n var found = getMarkedSpanFor(last, span.marker);\n if (!found) { span.to = startCh; }\n else if (sameLine) { span.to = found.to == null ? 
null : found.to + offset; }\n }\n }\n }\n if (last) {\n // Fix up .from in last (or move them into first in case of sameLine)\n for (var i$1 = 0; i$1 < last.length; ++i$1) {\n var span$1 = last[i$1];\n if (span$1.to != null) { span$1.to += offset; }\n if (span$1.from == null) {\n var found$1 = getMarkedSpanFor(first, span$1.marker);\n if (!found$1) {\n span$1.from = offset;\n if (sameLine) { (first || (first = [])).push(span$1); }\n }\n } else {\n span$1.from += offset;\n if (sameLine) { (first || (first = [])).push(span$1); }\n }\n }\n }\n // Make sure we didn't create any zero-length spans\n if (first) { first = clearEmptySpans(first); }\n if (last && last != first) { last = clearEmptySpans(last); }\n\n var newMarkers = [first];\n if (!sameLine) {\n // Fill gap with whole-line-spans\n var gap = change.text.length - 2, gapMarkers;\n if (gap > 0 && first)\n { for (var i$2 = 0; i$2 < first.length; ++i$2)\n { if (first[i$2].to == null)\n { (gapMarkers || (gapMarkers = [])).push(new MarkedSpan(first[i$2].marker, null, null)); } } }\n for (var i$3 = 0; i$3 < gap; ++i$3)\n { newMarkers.push(gapMarkers); }\n newMarkers.push(last);\n }\n return newMarkers\n}", "docstring": "// Given a change object, compute the new set of marker spans that", "url": "https://github.com/KouShenhai/KCloud-Platform-IoT/blob/d10ccb3e93bdec47d529e858ec31585d794700ab/laokou-cloud/laokou-nacos/src/main/resources/static/console-ui/public/js/codemirror.js#L607-L665", "sha": "d10ccb3e93bdec47d529e858ec31585d794700ab"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "onerror", "code": "function onerror(error) {\n if (error)\n QUnit.pushFailure(error.message || error, error.stack);\n\n QUnit.start();\n}", "docstring": "/**\n * Set an assert error and re-start the test so it can fail\n */", "url": "https://github.com/Kurento/kurento/blob/b04c741488b60a10692f08d01e87b8bb08ef0300/clients/javascript/client/test/_common.js", "sha": "b04c741488b60a10692f08d01e87b8bb08ef0300"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isIndex", "code": "function isIndex(value, length) {\n value = (typeof value == 'number' || reIsUint.test(value)) ? +value : -1;\n length = length == null ? 
MAX_SAFE_INTEGER : length;\n return value > -1 && value % 1 == 0 && value < length;\n}", "docstring": "/**\n * Checks if `value` is a valid array-like index.\n *\n * @private\n * @param {*} value The value to check.\n * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index.\n * @returns {boolean} Returns `true` if `value` is a valid index, else `false`.\n */", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/swagger/swagger-ui.js#L16318-L16322", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pipeResponseToStream", "code": "function pipeResponseToStream(response, output) {\n return __awaiter(this, void 0, void 0, function* () {\n const pipeline = util.promisify(stream.pipeline);\n yield pipeline(response.message, output);\n });\n}", "docstring": "/**\n * Pipes the body of a HTTP response to a stream\n *\n * @param response the HTTP response\n * @param output the writable stream\n */", "url": "https://github.com/DeterminateSystems/nix-installer-action/blob/a48face58194521af687ce7df4c802b1b558e743/dist/index.js#L814-L819", "sha": "a48face58194521af687ce7df4c802b1b558e743"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getOptions", "code": "function getOptions(copy) {\n const result = {\n followSymbolicLinks: true,\n implicitDescendants: true,\n omitBrokenSymbolicLinks: true\n };\n if (copy) {\n if (typeof copy.followSymbolicLinks === 'boolean') {\n result.followSymbolicLinks = copy.followSymbolicLinks;\n core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);\n }\n if (typeof copy.implicitDescendants === 'boolean') {\n result.implicitDescendants = copy.implicitDescendants;\n core.debug(`implicitDescendants '${result.implicitDescendants}'`);\n }\n if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {\n result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;\n core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);\n }\n }\n return result;\n}", "docstring": "/**\n * Returns a copy with defaults filled in.\n */", "url": "https://github.com/DeterminateSystems/nix-installer-action/blob/a48face58194521af687ce7df4c802b1b558e743/dist/index.js#L3570-L3591", "sha": "a48face58194521af687ce7df4c802b1b558e743"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "HttpHeadersImpl.delete", "code": "delete(name) {\n this._headersMap.delete(normalizeName(name));\n }", "docstring": "/**\n * Remove the header with the provided headerName.\n * @param name - The name of the header to remove.\n */", "url": "https://github.com/DeterminateSystems/nix-installer-action/blob/a48face58194521af687ce7df4c802b1b558e743/dist/index.js#L69795-L69797", "sha": "a48face58194521af687ce7df4c802b1b558e743"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "defaultRetryPolicy", "code": "function defaultRetryPolicy(options = {}) {\n var _a;\n return {\n name: exports.defaultRetryPolicyName,\n sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)(), (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(options)], {\n maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? 
_a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT,\n }).sendRequest,\n };\n}", "docstring": "/**\n * A policy that retries according to three strategies:\n * - When the server sends a 429 response with a Retry-After header.\n * - When there are errors in the underlying transport layer (e.g. DNS lookup failures).\n * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay.\n */", "url": "https://github.com/DeterminateSystems/nix-installer-action/blob/a48face58194521af687ce7df4c802b1b558e743/dist/index.js#L70980-L70988", "sha": "a48face58194521af687ce7df4c802b1b558e743"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "tlsPolicy", "code": "function tlsPolicy(tlsSettings) {\n return {\n name: exports.tlsPolicyName,\n sendRequest: async (req, next) => {\n // Users may define a request tlsSettings, honor those over the client level one\n if (!req.tlsSettings) {\n req.tlsSettings = tlsSettings;\n }\n return next(req);\n },\n };\n}", "docstring": "/**\n * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication.\n */", "url": "https://github.com/DeterminateSystems/nix-installer-action/blob/a48face58194521af687ce7df4c802b1b558e743/dist/index.js#L71836-L71847", "sha": "a48face58194521af687ce7df4c802b1b558e743"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "assertBoundFunction", "code": "function assertBoundFunction(value, message) {\n if (!isBoundFunction(value)) {\n throw new TypeError(message ?? typeErrorMessage('Function', value));\n }\n}", "docstring": "// eslint-disable-next-line @typescript-eslint/ban-types", "url": "https://github.com/DeterminateSystems/nix-installer-action/blob/a48face58194521af687ce7df4c802b1b558e743/dist/index.js#L76668-L76672", "sha": "a48face58194521af687ce7df4c802b1b558e743"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ScopeClass.update", "code": "update(captureContext) {\n if (!captureContext) {\n return this;\n }\n const scopeToMerge = typeof captureContext === \"function\" ? captureContext(this) : captureContext;\n const [scopeInstance, requestSession] = scopeToMerge instanceof Scope ? (\n // eslint-disable-next-line deprecation/deprecation\n [scopeToMerge.getScopeData(), scopeToMerge.getRequestSession()]\n ) : isPlainObject$5(scopeToMerge) ? 
[captureContext, captureContext.requestSession] : [];\n const { tags, extra, user, contexts, level, fingerprint = [], propagationContext } = scopeInstance || {};\n this._tags = { ...this._tags, ...tags };\n this._extra = { ...this._extra, ...extra };\n this._contexts = { ...this._contexts, ...contexts };\n if (user && Object.keys(user).length) {\n this._user = user;\n }\n if (level) {\n this._level = level;\n }\n if (fingerprint.length) {\n this._fingerprint = fingerprint;\n }\n if (propagationContext) {\n this._propagationContext = propagationContext;\n }\n if (requestSession) {\n this._requestSession = requestSession;\n }\n return this;\n }", "docstring": "/**\n * @inheritDoc\n */", "url": "https://github.com/comfyanonymous/ComfyUI/blob/af93c8d1ee4be91f30ffd395ea6919e6f83923aa/web/assets/index-DqqhYDnY.js#L7979-L8008", "sha": "af93c8d1ee4be91f30ffd395ea6919e6f83923aa"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "renderWaveDrom", "code": "function renderWaveDrom(id, json, style) {\n const skin = selectSkin(style);\n const renderObj = renderAny(id, json, skin);\n let svgString = onmlStringify(renderObj);\n \n // TODO: more elegant ? 这里是为了解决黑色模式下部分 rect 仍然是白色背景\n svgString = replaceRectsWithCustomString(svgString, style);\n\n return svgString;\n}", "docstring": "/**\n * \n * @param {number} id \n * @param {any} json \n * @param {'dark' | 'light'} style\n * @returns {string}\n */", "url": "https://github.com/Digital-EDA/Digital-IDE/blob/2f90a87a1c9df236ee621e586d8e56ac0e7aaa57/resources/wavedrom/index.js#L34-L43", "sha": "2f90a87a1c9df236ee621e586d8e56ac0e7aaa57"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "computeStyleTests", "code": "function computeStyleTests() {\n\n\t\t// This is a singleton, we need to execute it only once\n\t\tif ( !div ) {\n\t\t\treturn;\n\t\t}\n\n\t\tcontainer.style.cssText = \"position:absolute;left:-11111px;width:60px;\" +\n\t\t\t\"margin-top:1px;padding:0;border:0\";\n\t\tdiv.style.cssText =\n\t\t\t\"position:relative;display:block;box-sizing:border-box;overflow:scroll;\" +\n\t\t\t\"margin:auto;border:1px;padding:1px;\" +\n\t\t\t\"width:60%;top:1%\";\n\t\tdocumentElement.appendChild( container ).appendChild( div );\n\n\t\tvar divStyle = window.getComputedStyle( div );\n\t\tpixelPositionVal = divStyle.top !== \"1%\";\n\n\t\t// Support: Android 4.0 - 4.3 only, Firefox <=3 - 44\n\t\treliableMarginLeftVal = roundPixelMeasures( divStyle.marginLeft ) === 12;\n\n\t\t// Support: Android 4.0 - 4.3 only, Safari <=9.1 - 10.1, iOS <=7.0 - 9.3\n\t\t// Some styles come back with percentage values, even though they shouldn't\n\t\tdiv.style.right = \"60%\";\n\t\tpixelBoxStylesVal = roundPixelMeasures( divStyle.right ) === 36;\n\n\t\t// Support: IE 9 - 11 only\n\t\t// Detect misreporting of content dimensions for box-sizing:border-box elements\n\t\tboxSizingReliableVal = roundPixelMeasures( divStyle.width ) === 36;\n\n\t\t// Support: IE 9 only\n\t\t// Detect overflow:scroll screwiness (gh-3699)\n\t\t// Support: Chrome <=64\n\t\t// Don't get tricked when zoom affects offsetWidth (gh-4029)\n\t\tdiv.style.position = \"absolute\";\n\t\tscrollboxSizeVal = roundPixelMeasures( div.offsetWidth / 3 ) === 12;\n\n\t\tdocumentElement.removeChild( container );\n\n\t\t// Nullify the div so it wouldn't be stored in the memory and\n\t\t// it will also be a sign that checks already performed\n\t\tdiv = null;\n\t}", "docstring": "// Executing both pixelPosition & boxSizingReliable tests require only one layout", "url": 
"https://github.com/dotnet/beginner-series/blob/7094e2e5eed96c5a26d7aeef240a95f35201c063/Visual Studio/sample-code/RazorPagesCupcakes/wwwroot/lib/jquery/dist/jquery.js#L6439-L6481", "sha": "7094e2e5eed96c5a26d7aeef240a95f35201c063"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DynamicFont.dispose", "code": "dispose() {\n\t\tthis._fontAtlas.clear();\n\t\tthis._font.chars.length = 0;\n\t}", "docstring": "/**\n\t * Dispose the font data and atlas.\n\t */", "url": "https://github.com/uinosoft/t3d.js/blob/b746a7f5c17a58c6711fe8ce71788301ad474363/examples/jsm/DynamicFont.js#L91-L94", "sha": "b746a7f5c17a58c6711fe8ce71788301ad474363"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GLTFExporter.parse", "code": "parse(input, onDone, onError, options) {\n\t\tconst writer = new GLTFWriter();\n\n\t\tconst plugins = this.extensions.map(_ext => new _ext(writer));\n\n\t\twriter.setPlugins(plugins);\n\n\t\twriter.dracoOptions = this.dracoOptions;\n\t\twriter.setDRACOExporter(this._dracoExporter);\n\n\t\twriter.writeAsync(input, onDone, options).catch(onError);\n\t}", "docstring": "/**\n\t * Parse input root object(s) and generate GLTF output\n\t * @param {Object3D or [Object3D]} input root object(s)\n\t * @param {Function} onDone Callback on completed\n\t * @param {Function} onError Callback on errors\n\t * @param {Object} options options\n\t */", "url": "https://github.com/uinosoft/t3d.js/blob/b746a7f5c17a58c6711fe8ce71788301ad474363/examples/jsm/exporters/GLTFExporter.js#L60-L71", "sha": "b746a7f5c17a58c6711fe8ce71788301ad474363"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RenderStates.updateCamera", "code": "updateCamera(camera) {\n\t\tconst sceneData = this.scene;\n\t\tconst cameraData = this.camera;\n\t\tconst projectionMatrix = camera.projectionMatrix;\n\n\t\tlet cameraNear = 0, cameraFar = 0;\n\t\tif (_isPerspectiveMatrix(projectionMatrix)) {\n\t\t\tcameraNear = projectionMatrix.elements[14] / (projectionMatrix.elements[10] - 1);\n\t\t\tcameraFar = projectionMatrix.elements[14] / (projectionMatrix.elements[10] + 1);\n\t\t} else {\n\t\t\tcameraNear = (projectionMatrix.elements[14] + 1) / projectionMatrix.elements[10];\n\t\t\tcameraFar = (projectionMatrix.elements[14] - 1) / projectionMatrix.elements[10];\n\t\t}\n\n\t\tcameraData.near = cameraNear;\n\t\tcameraData.far = cameraFar;\n\n\t\tif (sceneData.logarithmicDepthBuffer) {\n\t\t\tcameraData.logDepthCameraNear = cameraNear;\n\t\t\tcameraData.logDepthBufFC = 2.0 / (Math.log(cameraFar - cameraNear + 1.0) * Math.LOG2E);\n\t\t} else {\n\t\t\tcameraData.logDepthCameraNear = 0;\n\t\t\tcameraData.logDepthBufFC = 0;\n\t\t}\n\n\t\tcameraData.position.setFromMatrixPosition(camera.worldMatrix);\n\t\tif (sceneData.useAnchorMatrix) {\n\t\t\tcameraData.position.applyMatrix4(sceneData.anchorMatrixInverse);\n\t\t}\n\n\t\tcameraData.viewMatrix.copy(camera.viewMatrix);\n\t\tif (sceneData.useAnchorMatrix) {\n\t\t\tcameraData.viewMatrix.multiply(sceneData.anchorMatrix);\n\t\t}\n\n\t\tcameraData.projectionMatrix.copy(projectionMatrix);\n\t\tcameraData.projectionViewMatrix.copy(projectionMatrix).multiply(cameraData.viewMatrix);\n\n\t\tcameraData.rect.copy(camera.rect);\n\n\t\tcameraData.version++;\n\n\t\tthis.gammaFactor = camera.gammaFactor;\n\t\tthis.outputEncoding = camera.outputEncoding;\n\t}", "docstring": "/**\n\t * Update render states about camera.\n\t * @param {t3d.Camera}\n\t */", "url": 
"https://github.com/uinosoft/t3d.js/blob/b746a7f5c17a58c6711fe8ce71788301ad474363/src/render/RenderStates.js#L44-L88", "sha": "b746a7f5c17a58c6711fe8ce71788301ad474363"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ThinRenderer.updateRenderTargetMipmap", "code": "updateRenderTargetMipmap(renderTarget) {}", "docstring": "/**\n\t * Reads the pixel data from the current render target into the provided buffer.\n\t * The Renderer.asyncReadPixel property determines whether this operation is synchronous or asynchronous.\n\t * To maintain consistency, this method always returns a Promise object.\n\t * @param {Number} x - The x coordinate of the rectangle to read from.\n\t * @param {Number} y - The y coordinate of the rectangle to read from.\n\t * @param {Number} width - The width of the rectangle to read from.\n\t * @param {Number} height - The height of the rectangle to read from.\n\t * @param {TypedArray} buffer Uint8Array is the only destination type supported in all cases, other types are renderTarget and platform dependent.\n\t * @return {Promise} A promise that resolves with the passed in buffer after it has been filled with the pixel data.\n\t */", "url": "https://github.com/uinosoft/t3d.js/blob/b746a7f5c17a58c6711fe8ce71788301ad474363/src/render/ThinRenderer.js#L191-L191", "sha": "b746a7f5c17a58c6711fe8ce71788301ad474363"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "random", "code": "function random(sd) {\n var d, e, k, n,\n i = 0,\n r = new this(1),\n rd = [];\n\n if (sd === void 0) sd = this.precision;\n else checkInt32(sd, 1, MAX_DIGITS);\n\n k = Math.ceil(sd / LOG_BASE);\n\n if (!this.crypto) {\n for (; i < k;) rd[i++] = Math.random() * 1e7 | 0;\n\n // Browsers supporting crypto.getRandomValues.\n } else if (crypto.getRandomValues) {\n d = crypto.getRandomValues(new Uint32Array(k));\n\n for (; i < k;) {\n n = d[i];\n\n // 0 <= n < 4294967296\n // Probability n >= 4.29e9, is 4967296 / 4294967296 = 0.00116 (1 in 865).\n if (n >= 4.29e9) {\n d[i] = crypto.getRandomValues(new Uint32Array(1))[0];\n } else {\n\n // 0 <= n <= 4289999999\n // 0 <= (n % 1e7) <= 9999999\n rd[i++] = n % 1e7;\n }\n }\n\n // Node.js supporting crypto.randomBytes.\n } else if (crypto.randomBytes) {\n\n // buffer\n d = crypto.randomBytes(k *= 4);\n\n for (; i < k;) {\n\n // 0 <= n < 2147483648\n n = d[i] + (d[i + 1] << 8) + (d[i + 2] << 16) + ((d[i + 3] & 0x7f) << 24);\n\n // Probability n >= 2.14e9, is 7483648 / 2147483648 = 0.0035 (1 in 286).\n if (n >= 2.14e9) {\n crypto.randomBytes(4).copy(d, i);\n } else {\n\n // 0 <= n <= 2139999999\n // 0 <= (n % 1e7) <= 9999999\n rd.push(n % 1e7);\n i += 4;\n }\n }\n\n i = k / 4;\n } else {\n throw Error(cryptoUnavailable);\n }\n\n k = rd[--i];\n sd %= LOG_BASE;\n\n // Convert trailing digits to zeros according to sd.\n if (k && sd) {\n n = mathpow(10, LOG_BASE - sd);\n rd[i] = (k / n | 0) * n;\n }\n\n // Remove trailing words which are zero.\n for (; rd[i] === 0; i--) rd.pop();\n\n // Zero?\n if (i < 0) {\n e = 0;\n rd = [0];\n } else {\n e = -1;\n\n // Remove leading words which are zero and adjust exponent accordingly.\n for (; rd[0] === 0; e -= LOG_BASE) rd.shift();\n\n // Count the digits of the first word of rd to determine leading zeros.\n for (k = 1, n = rd[0]; n >= 10; n /= 10) k++;\n\n // Adjust the exponent for leading zeros of the first word of rd.\n if (k < LOG_BASE) e -= LOG_BASE - k;\n }\n\n r.e = e;\n r.d = rd;\n\n return r;\n }", "docstring": "/*\n * Returns a new Decimal with a 
random value equal to or greater than 0 and less than 1, and with\n * `sd`, or `Decimal.precision` if `sd` is omitted, significant digits (or less if trailing zeros\n * are produced).\n *\n * [sd] {number} Significant digits. Integer, 0 to MAX_DIGITS inclusive.\n *\n */", "url": "https://github.com/bnmgh1/node-sandbox/blob/e39d2a8e5ca530039e03055f94199f22c3248b0a/node_modules/decimal.js/decimal.js#L4659-L4753", "sha": "e39d2a8e5ca530039e03055f94199f22c3248b0a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SelectionImpl.anchorNode", "code": "get anchorNode() {\n const anchor = this._anchor;\n return anchor ? anchor.node : null;\n }", "docstring": "// https://w3c.github.io/selection-api/#dom-selection-anchornode", "url": "https://github.com/bnmgh1/node-sandbox/blob/e39d2a8e5ca530039e03055f94199f22c3248b0a/node_modules/jsdom/lib/jsdom/living/selection/Selection-impl.js#L32-L35", "sha": "e39d2a8e5ca530039e03055f94199f22c3248b0a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_unmask", "code": "function _unmask(buffer, mask) {\n for (let i = 0; i < buffer.length; i++) {\n buffer[i] ^= mask[i & 3];\n }\n}", "docstring": "/**\n * Unmasks a buffer using the given mask.\n *\n * @param {Buffer} buffer The buffer to unmask\n * @param {Buffer} mask The mask to use\n * @public\n */", "url": "https://github.com/bnmgh1/node-sandbox/blob/e39d2a8e5ca530039e03055f94199f22c3248b0a/node_modules/ws/lib/buffer-util.js#L58-L62", "sha": "e39d2a8e5ca530039e03055f94199f22c3248b0a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "createStrictSyntaxError", "code": "function createStrictSyntaxError (str, char) {\n var index = str.indexOf(char)\n var partial = index !== -1\n ? str.substring(0, index) + '#'\n : ''\n\n try {\n JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation')\n } catch (e) {\n return normalizeJsonSyntaxError(e, {\n message: e.message.replace('#', char),\n stack: e.stack\n })\n }\n}", "docstring": "/**\n * Create strict violation syntax error matching native error.\n *\n * @param {string} str\n * @param {string} char\n * @return {Error}\n * @private\n */", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/6.EXPRESS-JS/File-Uploads2/1.Images-Upload/node_modules/body-parser/lib/types/json.js#L153-L167", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "mapCacheHas", "code": "function mapCacheHas(key) {\n return getMapData(this, key).has(key);\n}", "docstring": "/**\n * Checks if a map value for `key` exists.\n *\n * @private\n * @name has\n * @memberOf MapCache\n * @param {string} key The key of the entry to check.\n * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.\n */", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/6.EXPRESS-JS/File-Uploads2/1.Images-Upload/node_modules/cloudinary-core/cloudinary-core-shrinkwrap.js#L3540-L3542", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sourcetag_typeof", "code": "function sourcetag_typeof(obj) { \"@babel/helpers - typeof\"; return sourcetag_typeof = \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator ? 
function (obj) { return typeof obj; } : function (obj) { return obj && \"function\" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }, sourcetag_typeof(obj); }", "docstring": "// CONCATENATED MODULE: ./src/tags/sourcetag.js", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/6.EXPRESS-JS/File-Uploads2/1.Images-Upload/node_modules/cloudinary-core/cloudinary-core-shrinkwrap.js#L11348-L11348", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "lazyClone", "code": "function lazyClone() {\n var result = new LazyWrapper(this.__wrapped__);\n result.__actions__ = copyArray(this.__actions__);\n result.__dir__ = this.__dir__;\n result.__filtered__ = this.__filtered__;\n result.__iteratees__ = copyArray(this.__iteratees__);\n result.__takeCount__ = this.__takeCount__;\n result.__views__ = copyArray(this.__views__);\n return result;\n}", "docstring": "/**\n * Creates a clone of the lazy wrapper object.\n *\n * @private\n * @name clone\n * @memberOf LazyWrapper\n * @returns {Object} Returns the cloned `LazyWrapper` object.\n */", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/6.EXPRESS-JS/File-Uploads2/1.Images-Upload/node_modules/lodash/_lazyClone.js#L12-L21", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "baseLt", "code": "function baseLt(value, other) {\n return value < other;\n }", "docstring": "/**\n * The base implementation of `_.lt` which doesn't coerce arguments.\n *\n * @private\n * @param {*} value The value to compare.\n * @param {*} other The other value to compare.\n * @returns {boolean} Returns `true` if `value` is less than `other`,\n * else `false`.\n */", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/6.EXPRESS-JS/File-Uploads2/1.Images-Upload/node_modules/lodash/lodash.js#L3568-L3570", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "status", "code": "function status (code) {\n if (typeof code === 'number') {\n return getStatusMessage(code)\n }\n\n if (typeof code !== 'string') {\n throw new TypeError('code must be a number or string')\n }\n\n // '403'\n var n = parseInt(code, 10)\n if (!isNaN(n)) {\n return getStatusMessage(n)\n }\n\n return getStatusCode(code)\n}", "docstring": "/**\n * Get the status code.\n *\n * Given a number, this will throw if it is not a known status\n * code, otherwise the code will be returned. Given a string,\n * the string will be parsed for a number and return the code\n * if valid, otherwise will lookup the code assuming this is\n * the status message.\n *\n * @param {string|number} code\n * @returns {number}\n * @public\n */", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/6.EXPRESS-JS/File-Uploads2/1.Images-Upload/node_modules/statuses/index.js#L130-L146", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "useColors", "code": "function useColors() {\n // NB: In an Electron preload script, document will be defined but not fully\n // initialized. 
Since we know we're in Chrome, we'll just detect this case\n // explicitly\n if (typeof window !== 'undefined' && window.process && window.process.type === 'renderer') {\n return true;\n }\n\n // is webkit? http://stackoverflow.com/a/16459606/376773\n // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632\n return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||\n // is firebug? http://stackoverflow.com/a/398120/376773\n (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||\n // is firefox >= v31?\n // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages\n (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\\/(\\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||\n // double check webkit in userAgent just in case we are in a worker\n (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\\/(\\d+)/));\n}", "docstring": "/**\n * Currently only WebKit-based Web Inspectors, Firefox >= v31,\n * and the Firebug extension (any Firefox version) are known\n * to support \"%c\" CSS customizations.\n *\n * TODO: add a `localStorage` variable to explicitly enable/disable colors\n */", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/PROJECTS/AI-PROJECTS/DALLE-3-IMAGE-GENERATOR/backend/node_modules/debug/src/browser.js#L39-L57", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "baseXor", "code": "function baseXor(arrays, iteratee, comparator) {\n var length = arrays.length;\n if (length < 2) {\n return length ? baseUniq(arrays[0]) : [];\n }\n var index = -1,\n result = Array(length);\n\n while (++index < length) {\n var array = arrays[index],\n othIndex = -1;\n\n while (++othIndex < length) {\n if (othIndex != index) {\n result[index] = baseDifference(result[index] || array, arrays[othIndex], iteratee, comparator);\n }\n }\n }\n return baseUniq(baseFlatten(result, 1), iteratee, comparator);\n}", "docstring": "/**\n * The base implementation of methods like `_.xor`, without support for\n * iteratee shorthands, that accepts an array of arrays to inspect.\n *\n * @private\n * @param {Array} arrays The arrays to inspect.\n * @param {Function} [iteratee] The iteratee invoked per element.\n * @param {Function} [comparator] The comparator invoked per element.\n * @returns {Array} Returns the new array of values.\n */", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/PROJECTS/AI-PROJECTS/DALLE-3-IMAGE-GENERATOR/backend/node_modules/lodash/_baseXor.js#L15-L34", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "createCurry", "code": "function createCurry(func, bitmask, arity) {\n var Ctor = createCtor(func);\n\n function wrapper() {\n var length = arguments.length,\n args = Array(length),\n index = length,\n placeholder = getHolder(wrapper);\n\n while (index--) {\n args[index] = arguments[index];\n }\n var holders = (length < 3 && args[0] !== placeholder && args[length - 1] !== placeholder)\n ? 
[]\n : replaceHolders(args, placeholder);\n\n length -= holders.length;\n if (length < arity) {\n return createRecurry(\n func, bitmask, createHybrid, wrapper.placeholder, undefined,\n args, holders, undefined, undefined, arity - length);\n }\n var fn = (this && this !== root && this instanceof wrapper) ? Ctor : func;\n return apply(fn, this, args);\n }\n return wrapper;\n}", "docstring": "/**\n * Creates a function that wraps `func` to enable currying.\n *\n * @private\n * @param {Function} func The function to wrap.\n * @param {number} bitmask The bitmask flags. See `createWrap` for more details.\n * @param {number} arity The arity of `func`.\n * @returns {Function} Returns the new wrapped function.\n */", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/PROJECTS/AI-PROJECTS/DALLE-3-IMAGE-GENERATOR/backend/node_modules/lodash/_createCurry.js#L18-L44", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Collection.namespace", "code": "get namespace() {\n return this.fullNamespace.toString();\n }", "docstring": "/**\n * The namespace of this collection, in the format `${this.dbName}.${this.collectionName}`\n */", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/PROJECTS/AI-PROJECTS/DALLE-3-IMAGE-GENERATOR/backend/node_modules/mongodb/lib/collection.js#L95-L97", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ServerSession.hasTimedOut", "code": "hasTimedOut(sessionTimeoutMinutes) {\n // Take the difference of the lastUse timestamp and now, which will result in a value in\n // milliseconds, and then convert milliseconds to minutes to compare to `sessionTimeoutMinutes`\n const idleTimeMinutes = Math.round((((0, utils_1.calculateDurationInMs)(this.lastUse) % 86400000) % 3600000) / 60000);\n return idleTimeMinutes > sessionTimeoutMinutes - 1;\n }", "docstring": "/**\n * Determines if the server session has timed out.\n *\n * @param sessionTimeoutMinutes - The server's \"logicalSessionTimeoutMinutes\"\n */", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/PROJECTS/AI-PROJECTS/DALLE-3-IMAGE-GENERATOR/backend/node_modules/mongodb/lib/sessions.js#L555-L560", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "checkCharacter", "code": "function checkCharacter(inputChar, targetChar) {\n if (inputChar !== targetChar) {\n errors++;\n //play error sound\n new Audio(\"/error.mp3\").play();\n return false;\n } else {\n return true;\n }\n }", "docstring": "//Function to check typed character", "url": "https://github.com/tweneboah/Full-Stack-Web-Development-Bootcamp-Course/blob/6880262d9aa3d9e22e2a7910d7b7e727de75f590/PROJECTS/Typing-test-project-final/script.js#L46-L55", "sha": "6880262d9aa3d9e22e2a7910d7b7e727de75f590"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Client.DNSBLStatus", "code": "async DNSBLStatus() {\n\t\t\tconst fn = \"DNSBLStatus\";\n\t\t\tconst paramTypes = [];\n\t\t\tconst returnTypes = [[\"{}\", \"{}\", \"string\"], [\"[]\", \"Domain\"], [\"[]\", \"Domain\"]];\n\t\t\tconst params = [];\n\t\t\treturn await _sherpaCall(this.baseURL, this.authState, { ...this.options }, 
paramTypes, returnTypes, fn, params);\n\t\t}", "docstring": "// TLSRPTSummaries returns a summary of received TLS reports overlapping with", "url": "https://github.com/mjl-/mox/blob/93b627ceab9b90cb3669d0adc5d6dc208181fc7a/webadmin/admin.js#L714-L720", "sha": "93b627ceab9b90cb3669d0adc5d6dc208181fc7a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "innerMode", "code": "function innerMode(mode, state) {\n var info;\n while (mode.innerMode) {\n info = mode.innerMode(state);\n if (!info || info.mode == mode) { break }\n state = info.state;\n mode = info.mode;\n }\n return info || {mode: mode, state: state}\n }", "docstring": "// Given a mode and a state (for that mode), find the inner mode and", "url": "https://github.com/hoodoer/JS-Tap/blob/13e455a3c5c101cac6a573677bf173b6eacc0fa4/static/node_modules/codemirror/addon/runmode/runmode-standalone.js#L208-L217", "sha": "13e455a3c5c101cac6a573677bf173b6eacc0fa4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "drawSelectionCursor", "code": "function drawSelectionCursor(cm, head, output) {\n var pos = cursorCoords(cm, head, \"div\", null, null, !cm.options.singleCursorHeightPerLine);\n\n var cursor = output.appendChild(elt(\"div\", \"\\u00a0\", \"CodeMirror-cursor\"));\n cursor.style.left = pos.left + \"px\";\n cursor.style.top = pos.top + \"px\";\n cursor.style.height = Math.max(0, pos.bottom - pos.top) * cm.options.cursorHeight + \"px\";\n\n if (/\\bcm-fat-cursor\\b/.test(cm.getWrapperElement().className)) {\n var charPos = charCoords(cm, head, \"div\", null, null);\n var width = charPos.right - charPos.left;\n cursor.style.width = (width > 0 ? width : cm.defaultCharWidth()) + \"px\";\n }\n\n if (pos.other) {\n // Secondary cursor, shown when on a 'jump' in bi-directional text\n var otherCursor = output.appendChild(elt(\"div\", \"\\u00a0\", \"CodeMirror-cursor CodeMirror-secondarycursor\"));\n otherCursor.style.display = \"\";\n otherCursor.style.left = pos.other.left + \"px\";\n otherCursor.style.top = pos.other.top + \"px\";\n otherCursor.style.height = (pos.other.bottom - pos.other.top) * .85 + \"px\";\n }\n }", "docstring": "// Draws a cursor for the given range", "url": "https://github.com/hoodoer/JS-Tap/blob/13e455a3c5c101cac6a573677bf173b6eacc0fa4/static/node_modules/codemirror/lib/codemirror.js#L3192-L3214", "sha": "13e455a3c5c101cac6a573677bf173b6eacc0fa4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Command.requiredOption", "code": "requiredOption(flags, description, fn, defaultValue) {\n return this._optionEx({ mandatory: true }, flags, description, fn, defaultValue);\n }", "docstring": "/**\n * Add a required option which must have a value after parsing. This usually means\n * the option must be specified on the command line. 
(Otherwise the same as .option().)\n *\n * The `flags` string contains the short and/or long flags, separated by comma, a pipe or space.\n *\n * @param {string} flags\n * @param {string} [description]\n * @param {Function|*} [fn] - custom option processing function or default value\n * @param {*} [defaultValue]\n * @return {Command} `this` command for chaining\n */", "url": "https://github.com/hoodoer/JS-Tap/blob/13e455a3c5c101cac6a573677bf173b6eacc0fa4/static/node_modules/commander/lib/command.js#L674-L676", "sha": "13e455a3c5c101cac6a573677bf173b6eacc0fa4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "paddingTop", "code": "function paddingTop(display) {return display.lineSpace.offsetTop}", "docstring": "// POSITION MEASUREMENT", "url": "https://github.com/takahirox/webgpu-devtools/blob/cd7b11a54ee23df592347153d523da885e9bc975/extensions/panel.js#L2320-L2320", "sha": "cd7b11a54ee23df592347153d523da885e9bc975"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "visualLineContinued", "code": "function visualLineContinued(line) {\n var merged, lines;\n while (merged = collapsedSpanAtEnd(line)) {\n line = merged.find(1, true).line\n ;(lines || (lines = [])).push(line);\n }\n return lines\n }", "docstring": "// Returns an array of logical lines that continue the visual line", "url": "https://github.com/FuShengPing/android-qinglong/blob/f1e50c9279ed7081f6dcd882accdbb525e5ed5fc/app/src/main/assets/web/editor/codemirror.js#L1572-L1579", "sha": "f1e50c9279ed7081f6dcd882accdbb525e5ed5fc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Duration.reconfigure", "code": "reconfigure({ locale, numberingSystem, conversionAccuracy, matrix } = {}) {\n const loc = this.loc.clone({ locale, numberingSystem });\n const opts = { loc, matrix, conversionAccuracy };\n return clone$1(this, opts);\n }", "docstring": "/**\n * \"Set\" the locale and/or numberingSystem. Returns a newly-constructed Duration.\n * @example dur.reconfigure({ locale: 'en-GB' })\n * @return {Duration}\n */", "url": "https://github.com/CreArts-Community/CreArts-Obsidian-Vault/blob/5092f39680b1a13289962114f9d38622fe652551/.obsidian/plugins/dataview/main.js#L3195-L3199", "sha": "5092f39680b1a13289962114f9d38622fe652551"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Duration.shiftToAll", "code": "shiftToAll() {\n if (!this.isValid) return this;\n return this.shiftTo(\n \"years\",\n \"months\",\n \"weeks\",\n \"days\",\n \"hours\",\n \"minutes\",\n \"seconds\",\n \"milliseconds\"\n );\n }", "docstring": "/**\n * Shift this Duration to all available units.\n * Same as shiftTo(\"years\", \"months\", \"weeks\", \"days\", \"hours\", \"minutes\", \"seconds\", \"milliseconds\")\n * @return {Duration}\n */", "url": "https://github.com/CreArts-Community/CreArts-Obsidian-Vault/blob/5092f39680b1a13289962114f9d38622fe652551/.obsidian/plugins/dataview/main.js#L3306-L3318", "sha": "5092f39680b1a13289962114f9d38622fe652551"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DateTime.second", "code": "get second() {\n return this.isValid ? 
this.c.second : NaN;\n }", "docstring": "/**\n * Get the hour of the day (0-23).\n * @example DateTime.local(2017, 5, 25, 9).hour //=> 9\n * @type {number}\n */", "url": "https://github.com/CreArts-Community/CreArts-Obsidian-Vault/blob/5092f39680b1a13289962114f9d38622fe652551/.obsidian/plugins/dataview/main.js#L6023-L6025", "sha": "5092f39680b1a13289962114f9d38622fe652551"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DateTime.until", "code": "until(otherDateTime) {\n return this.isValid ? Interval.fromDateTimes(this, otherDateTime) : this;\n }", "docstring": "/**\n * Returns a JavaScript object with this DateTime's year, month, day, and so on.\n * @param opts - options for generating the object\n * @param {boolean} [opts.includeConfig=false] - include configuration attributes in the output\n * @example DateTime.now().toObject() //=> { year: 2017, month: 4, day: 22, hour: 20, minute: 49, second: 42, millisecond: 268 }\n * @return {Object}\n */", "url": "https://github.com/CreArts-Community/CreArts-Obsidian-Vault/blob/5092f39680b1a13289962114f9d38622fe652551/.obsidian/plugins/dataview/main.js#L6839-L6841", "sha": "5092f39680b1a13289962114f9d38622fe652551"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "InlineWidget.toDOM", "code": "toDOM(view) {\n this.el.addClasses(this.cssClasses);\n return this.el;\n }", "docstring": "// to prevent redraws when the editor updates.", "url": "https://github.com/CreArts-Community/CreArts-Obsidian-Vault/blob/5092f39680b1a13289962114f9d38622fe652551/.obsidian/plugins/dataview/main.js#L19172-L19175", "sha": "5092f39680b1a13289962114f9d38622fe652551"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "compileGeneralSelector", "code": "function compileGeneralSelector(next, selector, options, context, compileToken) {\n var adapter = options.adapter, equals = options.equals;\n switch (selector.type) {\n case css_what_1.SelectorType.PseudoElement: {\n throw new Error(\"Pseudo-elements are not supported by css-select\");\n }\n case css_what_1.SelectorType.ColumnCombinator: {\n throw new Error(\"Column combinators are not yet supported by css-select\");\n }\n case css_what_1.SelectorType.Attribute: {\n if (selector.namespace != null) {\n throw new Error(\"Namespaced attributes are not yet supported by css-select\");\n }\n if (!options.xmlMode || options.lowerCaseAttributeNames) {\n selector.name = selector.name.toLowerCase();\n }\n return attributes.attributeRules[selector.action](next, selector, options);\n }\n case css_what_1.SelectorType.Pseudo: {\n return (0, pseudoSelectors.compilePseudoSelector)(next, selector, options, context, compileToken);\n }\n // Tags\n case css_what_1.SelectorType.Tag: {\n if (selector.namespace != null) {\n throw new Error(\"Namespaced tag names are not yet supported by css-select\");\n }\n var name_1 = selector.name;\n if (!options.xmlMode || options.lowerCaseTags) {\n name_1 = name_1.toLowerCase();\n }\n return function tag(elem) {\n return adapter.getName(elem) === name_1 && next(elem);\n };\n }\n // Traversal\n case css_what_1.SelectorType.Descendant: {\n if (options.cacheResults === false ||\n typeof WeakSet === \"undefined\") {\n return function descendant(elem) {\n var current = elem;\n while ((current = getElementParent(current, adapter))) {\n if (next(current)) {\n return true;\n }\n }\n return false;\n };\n }\n // @ts-expect-error `ElementNode` is not extending object\n var isFalseCache_1 = new WeakSet();\n return 
function cachedDescendant(elem) {\n var current = elem;\n while ((current = getElementParent(current, adapter))) {\n if (!isFalseCache_1.has(current)) {\n if (adapter.isTag(current) && next(current)) {\n return true;\n }\n isFalseCache_1.add(current);\n }\n }\n return false;\n };\n }\n case \"_flexibleDescendant\": {\n // Include element itself, only used while querying an array\n return function flexibleDescendant(elem) {\n var current = elem;\n do {\n if (next(current))\n return true;\n } while ((current = getElementParent(current, adapter)));\n return false;\n };\n }\n case css_what_1.SelectorType.Parent: {\n return function parent(elem) {\n return adapter\n .getChildren(elem)\n .some(function (elem) { return adapter.isTag(elem) && next(elem); });\n };\n }\n case css_what_1.SelectorType.Child: {\n return function child(elem) {\n var parent = adapter.getParent(elem);\n return parent != null && adapter.isTag(parent) && next(parent);\n };\n }\n case css_what_1.SelectorType.Sibling: {\n return function sibling(elem) {\n var siblings = adapter.getSiblings(elem);\n for (var i = 0; i < siblings.length; i++) {\n var currentSibling = siblings[i];\n if (equals(elem, currentSibling))\n break;\n if (adapter.isTag(currentSibling) && next(currentSibling)) {\n return true;\n }\n }\n return false;\n };\n }\n case css_what_1.SelectorType.Adjacent: {\n if (adapter.prevElementSibling) {\n return function adjacent(elem) {\n var previous = adapter.prevElementSibling(elem);\n return previous != null && next(previous);\n };\n }\n return function adjacent(elem) {\n var siblings = adapter.getSiblings(elem);\n var lastElement;\n for (var i = 0; i < siblings.length; i++) {\n var currentSibling = siblings[i];\n if (equals(elem, currentSibling))\n break;\n if (adapter.isTag(currentSibling)) {\n lastElement = currentSibling;\n }\n }\n return !!lastElement && next(lastElement);\n };\n }\n case css_what_1.SelectorType.Universal: {\n if (selector.namespace != null && selector.namespace !== \"*\") {\n throw new Error(\"Namespaced universal selectors are not yet supported by css-select\");\n }\n return next;\n }\n }\n}", "docstring": "/*\n * All available rules\n */", "url": "https://github.com/CreArts-Community/CreArts-Obsidian-Vault/blob/5092f39680b1a13289962114f9d38622fe652551/.obsidian/plugins/simple-embeds/main.js#L6158-L6287", "sha": "5092f39680b1a13289962114f9d38622fe652551"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isBlob", "code": "function isBlob(x) {\n return typeof x.stream === \"function\";\n}", "docstring": "// node_modules/.pnpm/@azure+core-rest-pipeline@1.18.1/node_modules/@azure/core-rest-pipeline/dist/esm/util/typeGuards.js", "url": "https://github.com/mamba-org/setup-micromamba/blob/59bcdbae779c639f869cc8e3b223f36692af05c2/dist/post.js#L25942-L25944", "sha": "59bcdbae779c639f869cc8e3b223f36692af05c2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getDisableDocs", "code": "function getDisableDocs(r) {\n const value = nginxConfig.disableDocs \n && !ngx.shared[\"tmpDict\"].get(\"opendocs\");\n // r.log(`getDisableDocs: ${value}`);\n return value;\n}", "docstring": "// for js_set", "url": "https://github.com/bpking1/embyExternalUrl/blob/da18efe0009453c5ae198fc3cf44b84c6ee965f5/emby2Alist/nginx/conf.d/config/constant-nginx.js#L11-L16", "sha": "da18efe0009453c5ae198fc3cf44b84c6ee965f5"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "checkOffset", "code": "function checkOffset (offset, ext, 
length) {\n if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')\n if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')\n}", "docstring": "/*\n * Need to make sure that buffer isn't trying to write out of bounds.\n */", "url": "https://github.com/chartello/chartello/blob/755b5ecf6094619da88ebe2e3e35a852de528036/public/js/app.js#L23686-L23689", "sha": "755b5ecf6094619da88ebe2e3e35a852de528036"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "toType", "code": "function toType(obj) {\n return {}.toString.call(obj).match(/\\s([a-z]+)/i)[1].toLowerCase();\n }", "docstring": "// Shoutout AngusCroll (https://goo.gl/pxwQGp)", "url": "https://github.com/zmrid/iMES-Factory/blob/d301552bd0e953882ea0b2162c0e17b36dd3a2a9/iMES.Net/iMES.WebApi/wwwroot/lib/bootstrap/dist/js/bootstrap.bundle.js#L86-L88", "sha": "d301552bd0e953882ea0b2162c0e17b36dd3a2a9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "get", "code": "function get (url, param, loading, config) {\n showLoading(loading);\n axios.defaults.headers[_Authorization] = getToken();\n return new Promise((resolve, reject) => {\n axios.get(url, config)\n .then(response => {\n resolve(response.data)\n }, err => {\n reject(err)\n })\n .catch((error) => {\n reject(error)\n })\n })\n}", "docstring": "//=true异步请求时会显示遮罩层,=字符串,异步请求时遮罩层显示当前字符串", "url": "https://github.com/zmrid/iMES-Factory/blob/d301552bd0e953882ea0b2162c0e17b36dd3a2a9/iMES.Vue3/src/api/http.js#L138-L152", "sha": "d301552bd0e953882ea0b2162c0e17b36dd3a2a9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "includes", "code": "function includes(collection, value, fromIndex, guard) {\n collection = isArrayLike(collection) ? collection : values(collection);\n fromIndex = (fromIndex && !guard) ? toInteger(fromIndex) : 0;\n\n var length = collection.length;\n if (fromIndex < 0) {\n fromIndex = nativeMax(length + fromIndex, 0);\n }\n return isString(collection)\n ? (fromIndex <= length && collection.indexOf(value, fromIndex) > -1)\n : (!!length && baseIndexOf(collection, value, fromIndex) > -1);\n }", "docstring": "/**\n * Checks if `value` is in `collection`. If `collection` is a string, it's\n * checked for a substring of `value`, otherwise\n * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)\n * is used for equality comparisons. 
If `fromIndex` is negative, it's used as\n * the offset from the end of `collection`.\n *\n * @static\n * @memberOf _\n * @since 0.1.0\n * @category Collection\n * @param {Array|Object|string} collection The collection to inspect.\n * @param {*} value The value to search for.\n * @param {number} [fromIndex=0] The index to search from.\n * @param- {Object} [guard] Enables use as an iteratee for methods like `_.reduce`.\n * @returns {boolean} Returns `true` if `value` is found, else `false`.\n * @example\n *\n * _.includes([1, 2, 3], 1);\n * // => true\n *\n * _.includes([1, 2, 3], 1, 2);\n * // => false\n *\n * _.includes({ 'a': 1, 'b': 2 }, 1);\n * // => true\n *\n * _.includes('abcd', 'bc');\n * // => true\n */", "url": "https://github.com/MHSanaei/3x-ui/blob/49bfff9fa5a6acb50c8f51710a3e4005daf0ca32/web/assets/codemirror/jshint.js#L10885-L10896", "sha": "49bfff9fa5a6acb50c8f51710a3e4005daf0ca32"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isValidArrayIndex", "code": "function isValidArrayIndex(val) {\n const n = parseFloat(String(val));\n return n >= 0 && Math.floor(n) === n && isFinite(val);\n}", "docstring": "/**\n * Check if val is a valid array index.\n */", "url": "https://github.com/MHSanaei/3x-ui/blob/49bfff9fa5a6acb50c8f51710a3e4005daf0ca32/web/assets/vue/vue.common.dev.js#L65-L68", "sha": "49bfff9fa5a6acb50c8f51710a3e4005daf0ca32"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "defineReactive", "code": "function defineReactive(obj, key, val, customSetter, shallow, mock, observeEvenIfShallow = false) {\n const dep = new Dep();\n const property = Object.getOwnPropertyDescriptor(obj, key);\n if (property && property.configurable === false) {\n return;\n }\n // cater for pre-defined getter/setters\n const getter = property && property.get;\n const setter = property && property.set;\n if ((!getter || setter) &&\n (val === NO_INITIAL_VALUE || arguments.length === 2)) {\n val = obj[key];\n }\n let childOb = shallow ? val && val.__ob__ : observe(val, false, mock);\n Object.defineProperty(obj, key, {\n enumerable: true,\n configurable: true,\n get: function reactiveGetter() {\n const value = getter ? getter.call(obj) : val;\n if (Dep.target) {\n {\n dep.depend({\n target: obj,\n type: \"get\" /* TrackOpTypes.GET */,\n key\n });\n }\n if (childOb) {\n childOb.dep.depend();\n if (isArray(value)) {\n dependArray(value);\n }\n }\n }\n return isRef(value) && !shallow ? value.value : value;\n },\n set: function reactiveSetter(newVal) {\n const value = getter ? getter.call(obj) : val;\n if (!hasChanged(value, newVal)) {\n return;\n }\n if (customSetter) {\n customSetter();\n }\n if (setter) {\n setter.call(obj, newVal);\n }\n else if (getter) {\n // #7981: for accessor properties without setter\n return;\n }\n else if (!shallow && isRef(value) && !isRef(newVal)) {\n value.value = newVal;\n return;\n }\n else {\n val = newVal;\n }\n childOb = shallow ? 
newVal && newVal.__ob__ : observe(newVal, false, mock);\n {\n dep.notify({\n type: \"set\" /* TriggerOpTypes.SET */,\n target: obj,\n key,\n newValue: newVal,\n oldValue: value\n });\n }\n }\n });\n return dep;\n}", "docstring": "/**\n * Define a reactive property on an Object.\n */", "url": "https://github.com/MHSanaei/3x-ui/blob/49bfff9fa5a6acb50c8f51710a3e4005daf0ca32/web/assets/vue/vue.common.dev.js#L940-L1011", "sha": "49bfff9fa5a6acb50c8f51710a3e4005daf0ca32"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "processSlotOutlet", "code": "function processSlotOutlet(el) {\n if (el.tag === 'slot') {\n el.slotName = getBindingAttr(el, 'name');\n if (el.key) {\n warn(\"`key` does not work on because slots are abstract outlets \" +\n \"and can possibly expand into multiple elements. \" +\n \"Use the key on a wrapping element instead.\", getRawBindingAttr(el, 'key'));\n }\n }\n }", "docstring": "// handle outlets", "url": "https://github.com/MHSanaei/3x-ui/blob/49bfff9fa5a6acb50c8f51710a3e4005daf0ca32/web/assets/vue/vue.js#L10389-L10398", "sha": "49bfff9fa5a6acb50c8f51710a3e4005daf0ca32"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fileFlagLocal2Remote", "code": "function fileFlagLocal2Remote(flag) {\n return {\n type: SpecialArgType.FILEFLAG,\n flagStr: flag.getFlagString()\n };\n}", "docstring": "/**\n * @hidden\n */", "url": "https://github.com/sparrow-js/an-codeAI/blob/a6184f39fb36ad9cda9857f45a1226acc3cdd0b9/public/static/browserfs11/browserfs.js#L10937-L10942", "sha": "a6184f39fb36ad9cda9857f45a1226acc3cdd0b9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "conditionalGroup", "code": "function conditionalGroup(states, opts) {\n return group(states[0], Object.assign({}, opts, {\n expandedStates: states\n }));\n }", "docstring": "/**\n * @param {Doc[]} states\n * @param {object} [opts] - TBD ???\n * @returns Doc\n */", "url": "https://github.com/sparrow-js/an-codeAI/blob/a6184f39fb36ad9cda9857f45a1226acc3cdd0b9/public/static/js/prettier/2.0.5/standalone.js#L14038-L14042", "sha": "a6184f39fb36ad9cda9857f45a1226acc3cdd0b9"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RegExpRoute.constructor", "code": "constructor(regExp, handler, method) {\n {\n finalAssertExports.isInstance(regExp, RegExp, {\n moduleName: 'workbox-routing',\n className: 'RegExpRoute',\n funcName: 'constructor',\n paramName: 'pattern'\n });\n }\n const match = ({\n url\n }) => {\n const result = regExp.exec(url.href);\n // Return immediately if there's no match.\n if (!result) {\n return;\n }\n // Require that the match start at the first character in the URL string\n // if it's a cross-origin request.\n // See https://github.com/GoogleChrome/workbox/issues/281 for the context\n // behind this behavior.\n if (url.origin !== location.origin && result.index !== 0) {\n {\n logger.debug(`The regular expression '${regExp.toString()}' only partially matched ` + `against the cross-origin URL '${url.toString()}'. 
RegExpRoute's will only ` + `handle cross-origin requests if they match the entire URL.`);\n }\n return;\n }\n // If the route matches, but there aren't any capture groups defined, then\n // this will return [], which is truthy and therefore sufficient to\n // indicate a match.\n // If there are capture groups, then it will return their values.\n return result.slice(1);\n };\n super(match, handler, method);\n }", "docstring": "/**\n * If the regular expression contains\n * [capture groups]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#grouping-back-references},\n * the captured values will be passed to the\n * {@link workbox-routing~handlerCallback} `params`\n * argument.\n *\n * @param {RegExp} regExp The regular expression to match against URLs.\n * @param {workbox-routing~handlerCallback} handler A callback\n * function that returns a Promise resulting in a Response.\n * @param {string} [method='GET'] The HTTP method to match the Route\n * against.\n */", "url": "https://github.com/priyankarpal/projectshut/blob/3b38850653a70809ee8b13bd452208454daa9d79/public/workbox-8817a5e5.js#L616-L650", "sha": "3b38850653a70809ee8b13bd452208454daa9d79"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AnthropicClient.getStreamUsage", "code": "getStreamUsage() {\n const inputUsage = this.message_start?.message?.usage ?? {};\n const outputUsage = this.message_delta?.usage ?? {};\n return Object.assign({}, inputUsage, outputUsage);\n }", "docstring": "/**\n * Get stream usage as returned by this client's API response.\n * @returns {AnthropicStreamUsage} The stream usage object.\n */", "url": "https://github.com/danny-avila/LibreChat/blob/52a6de2aa756564ffe12114ebfce0e7f93ea125c/api/app/clients/AnthropicClient.js#L195-L199", "sha": "52a6de2aa756564ffe12114ebfce0e7f93ea125c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PyAnnoteFeatureExtractor.samples_to_frames", "code": "samples_to_frames(samples) {\n return ((samples - this.config.offset) / this.config.step);\n }", "docstring": "/**\n * NOTE: Can return fractional values. 
`Math.ceil` will ensure correct value.\n * @param {number} samples The number of frames in the audio.\n * @returns {number} The number of frames in the audio.\n */", "url": "https://github.com/huggingface/transformers.js/blob/829ace02044ba19ff9fa0293b4b975f1046cb6de/src/models/pyannote/feature_extraction_pyannote.js#L34-L36", "sha": "829ace02044ba19ff9fa0293b4b975f1046cb6de"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getNodeRequestOptions$2", "code": "function getNodeRequestOptions$2(request) {\n\tconst parsedURL = request[INTERNALS$2$2].parsedURL;\n\tconst headers = new Headers$2(request[INTERNALS$2$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream$2.Readable && !streamDestructionSupported$1) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes$2(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders$1(headers),\n\t\tagent\n\t});\n}", "docstring": "/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */", "url": "https://github.com/anc95/ChatGPT-CodeReview/blob/ca9b6722ab7d015b71fe12c02ed24ac855b5d13a/action/github-action.js#L133633-L133697", "sha": "ca9b6722ab7d015b71fe12c02ed24ac855b5d13a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "parseipNotation", "code": "function parseipNotation (note) {\n var pos = note.lastIndexOf('/');\n var str = pos !== -1\n ? note.substring(0, pos)\n : note;\n\n if (!isip(str)) {\n throw new TypeError('invalid IP address: ' + str)\n }\n\n var ip = parseip(str);\n\n if (pos === -1 && ip.kind() === 'ipv6' && ip.isIPv4MappedAddress()) {\n // Store as IPv4\n ip = ip.toIPv4Address();\n }\n\n var max = ip.kind() === 'ipv6'\n ? 128\n : 32;\n\n var range = pos !== -1\n ? 
note.substring(pos + 1, note.length)\n : null;\n\n if (range === null) {\n range = max;\n } else if (DIGIT_REGEXP.test(range)) {\n range = parseInt(range, 10);\n } else if (ip.kind() === 'ipv4' && isip(range)) {\n range = parseNetmask(range);\n } else {\n range = null;\n }\n\n if (range <= 0 || range > max) {\n throw new TypeError('invalid range on address: ' + note)\n }\n\n return [ip, range]\n}", "docstring": "/**\n * Parse IP notation string into range subnet.\n *\n * @param {String} note\n * @private\n */", "url": "https://github.com/anc95/ChatGPT-CodeReview/blob/ca9b6722ab7d015b71fe12c02ed24ac855b5d13a/action/github-action.js#L179070-L179110", "sha": "ca9b6722ab7d015b71fe12c02ed24ac855b5d13a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getTeyvatData", "code": "async function getTeyvatData (TBody, type = 'single') {\n const apiMap = {\n single: 'https://api.lelaer.com/ys/getDamageResult.php',\n team: 'https://api.lelaer.com/ys/getTeamResult.php'\n }\n try {\n const response = await fetch(apiMap[type], {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...headers // 假设您已经定义了 `headers` 对象\n },\n body: JSON.stringify(TBody),\n timeout: 15000\n })\n const resJson = await response.json()\n return resJson\n } catch (error) {\n console.error('提瓦特小助手接口无法访问或返回错误', error)\n return {}\n }\n}", "docstring": "/**\n * 获取小助手对应功能的数据\n * @param {String} TBody 请求需要的数据\n * @param {String} type 功能对应api 默认为 Single\n * @returns 小助手返回数据\n */", "url": "https://github.com/AFanSKyQs/FanSky_Qs/blob/86d002866114fa1dd1c765325fde621925938adc/apps/Teyvat/GetData/getTeyvatData.js#L15-L36", "sha": "86d002866114fa1dd1c765325fde621925938adc"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fixInput", "code": "function fixInput( src, dest ) {\n\tvar nodeName = dest.nodeName.toLowerCase();\n\n\t// Fails to persist the checked state of a cloned checkbox or radio button.\n\tif ( nodeName === \"input\" && rcheckableType.test( src.type ) ) {\n\t\tdest.checked = src.checked;\n\n\t// Fails to return the selected option to the default selected state when cloning options\n\t} else if ( nodeName === \"input\" || nodeName === \"textarea\" ) {\n\t\tdest.defaultValue = src.defaultValue;\n\t}\n}", "docstring": "// Fix IE bugs, see support tests", "url": "https://github.com/HazyResearch/safari/blob/02220c69d247e5473616cd053a443ad99fd2559b/csrc/fftconv/mathdx/22.02/include/cufftdx/docs/_static/jquery-3.5.1.js#L6034-L6045", "sha": "02220c69d247e5473616cd053a443ad99fd2559b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "resizeBilinear$2", "code": "function resizeBilinear$2(args) {\n\t var inputs = args.inputs,\n\t backend = args.backend,\n\t attrs = args.attrs;\n\t var images = inputs.images;\n\t var alignCorners = attrs.alignCorners,\n\t halfPixelCenters = attrs.halfPixelCenters,\n\t size = attrs.size;\n\t var newHeight = size[0],\n\t newWidth = size[1];\n\t var program = env().getBool('WEBGL_PACK_IMAGE_OPERATIONS') ? new ResizeBilinearPackedProgram(images.shape, newHeight, newWidth, alignCorners, halfPixelCenters) : new ResizeBilinearProgram(images.shape, newHeight, newWidth, alignCorners, halfPixelCenters);\n\t return backend.runWebGLProgram(program, [images], 'float32');\n\t}", "docstring": "/**\n\t * @license\n\t * Copyright 2020 Google LLC. 
All Rights Reserved.\n\t * Licensed under the Apache License, Version 2.0 (the \"License\");\n\t * you may not use this file except in compliance with the License.\n\t * You may obtain a copy of the License at\n\t *\n\t * http://www.apache.org/licenses/LICENSE-2.0\n\t *\n\t * Unless required by applicable law or agreed to in writing, software\n\t * distributed under the License is distributed on an \"AS IS\" BASIS,\n\t * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\t * See the License for the specific language governing permissions and\n\t * limitations under the License.\n\t * =============================================================================\n\t */", "url": "https://github.com/ducbao414/win32.run/blob/05160523659c100d85802f15e164a0b9358b38b7/static/html/jspaint/lib/tracky-mouse/lib/tf.js#L119284-L119296", "sha": "05160523659c100d85802f15e164a0b9358b38b7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getElementOffset", "code": "function getElementOffset(element) {\n var docElem,\n doc = element && element.ownerDocument,\n box = { left: 0, top: 0 },\n offset = { left: 0, top: 0 },\n scrollLeftTop,\n offsetAttributes = {\n borderLeftWidth: 'left',\n borderTopWidth: 'top',\n paddingLeft: 'left',\n paddingTop: 'top'\n };\n\n if (!doc) {\n return offset;\n }\n\n for (var attr in offsetAttributes) {\n offset[offsetAttributes[attr]] += parseInt(getElementStyle(element, attr), 10) || 0;\n }\n\n docElem = doc.documentElement;\n if ( typeof element.getBoundingClientRect !== 'undefined' ) {\n box = element.getBoundingClientRect();\n }\n\n scrollLeftTop = getScrollLeftTop(element);\n\n return {\n left: box.left + scrollLeftTop.left - (docElem.clientLeft || 0) + offset.left,\n top: box.top + scrollLeftTop.top - (docElem.clientTop || 0) + offset.top\n };\n }", "docstring": "/**\n * Returns offset for a given element\n * @function\n * @memberOf fabric.util\n * @param {HTMLElement} element Element to get offset for\n * @return {Object} Object with \"left\" and \"top\" properties\n */", "url": "https://github.com/ducbao414/win32.run/blob/05160523659c100d85802f15e164a0b9358b38b7/static/html/photon/dist/tui-image-editor.js#L3269-L3301", "sha": "05160523659c100d85802f15e164a0b9358b38b7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_getAndroid", "code": "function _getAndroid() {\n\t\tvar android = false;\n\t\tvar sAgent = navigator.userAgent;\n\t\t\n\t\tif (/android/i.test(sAgent)) { // android\n\t\t\tandroid = true;\n\t\t\taMat = sAgent.toString().match(/android ([0-9]\\.[0-9])/i);\n\t\t\t\n\t\t\tif (aMat && aMat[1]) {\n\t\t\t\tandroid = parseFloat(aMat[1]);\n\t\t\t}\n\t\t}\n\t\t\n\t\treturn android;\n\t}", "docstring": "// android 2.x doesn't support Data-URI spec", "url": "https://github.com/orenyomtov/openordex/blob/44581ec727c439c15178413b1d46c8f6176f253a/js/qrcode.js#L157-L171", "sha": "44581ec727c439c15178413b1d46c8f6176f253a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "normalizeArray", "code": "function normalizeArray(parts, allowAboveRoot) {\n // if the path tries to go above the root, `up` ends up > 0\n var up = 0;\n for (var i = parts.length - 1; i >= 0; i--) {\n var last = parts[i];\n if (last === '.') {\n parts.splice(i, 1);\n } else if (last === '..') {\n parts.splice(i, 1);\n up++;\n } else if (up) {\n parts.splice(i, 1);\n up--;\n }\n }\n\n // if the path is allowed to go above the root, restore leading ..s\n if (allowAboveRoot) {\n 
for (; up--; up) {\n parts.unshift('..');\n }\n }\n\n return parts;\n}", "docstring": "// Copyright Joyent, Inc. and other Node contributors.", "url": "https://github.com/nikit0ns/Ukrainian_IT_Communities/blob/e0cea263be2e7d1da8e0e655134d787475dee526/site/assets/javascripts/lunr/wordcut.js#L4112-L4136", "sha": "e0cea263be2e7d1da8e0e655134d787475dee526"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "avgPoolGrad", "code": "function avgPoolGrad(args) {\n const { inputs, backend, attrs } = args;\n const { dy, input } = inputs;\n const x = input;\n assertNotComplex([dy, input], 'avgPoolGrad');\n const { filterSize, strides, pad } = attrs;\n const convInfo = tfjsCore.backend_util.computePool2DInfo(x.shape, filterSize, strides, 1 /* dilations */, pad);\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = tfjsCore.buffer(x.shape, 'float32');\n const avgMultiplier = 1 / (filterHeight * filterWidth);\n const dyData = backend.data.get(dy.dataId).values;\n const dyBuf = tfjsCore.buffer(dy.shape, 'float32', dyData);\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let dxR = 0; dxR < convInfo.inHeight; ++dxR) {\n for (let dxC = 0; dxC < convInfo.inWidth; ++dxC) {\n // Shader code begins.\n const dyRCorner = dxR - padTop;\n const dyCCorner = dxC - padLeft;\n let dotProd = 0;\n for (let wR = 0; wR < effectiveFilterHeight; wR += dilationHeight) {\n const dyR = (dyRCorner + wR) / strideHeight;\n if (dyR < 0 || dyR >= convInfo.outHeight ||\n Math.floor(dyR) !== dyR) {\n continue;\n }\n for (let wC = 0; wC < effectiveFilterWidth; wC += dilationWidth) {\n const dyC = (dyCCorner + wC) / strideWidth;\n if (dyC < 0 || dyC >= convInfo.outWidth ||\n Math.floor(dyC) !== dyC) {\n continue;\n }\n const pixel = dyBuf.get(b, dyR, dyC, d);\n dotProd += pixel;\n }\n }\n dx.set(dotProd * avgMultiplier, b, dxR, dxC, d);\n }\n }\n }\n }\n return backend.makeTensorInfo(dx.shape, dx.dtype, dx.values);\n }", "docstring": "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.es2017.js#L4774-L4825", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "stringToHashBucketFast", "code": "function stringToHashBucketFast(args) {\n var inputs = args.inputs, backend = args.backend, attrs = args.attrs;\n var numBuckets = attrs.numBuckets;\n var input = inputs.input;\n if (input.dtype !== 'string') {\n throw new Error('Input must be of datatype string');\n }\n if (numBuckets <= 0) {\n throw new Error(\"Number of buckets must be at least 1\");\n }\n var $input = backend.readSync(input.dataId);\n var output = stringToHashBucketFastImplCPU($input, numBuckets);\n return backend.makeTensorInfo(input.shape, 'int32', output);\n}", "docstring": "/**\n * @license\n * Copyright 2021 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-backend-webgl/dist/tf-backend-webgl.node.js#L17270-L17283", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "copy", "code": "function copy(f, t) {\n t.i = f.i;\n t.j = f.j;\n t.S = f.S.slice();\n return t;\n }", "docstring": "//", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-converter/dist/tf-converter.es2017.js#L20137-L20142", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "argMin_", "code": "function argMin_(x, axis = 0) {\n const $x = convertToTensor(x, 'x', 'argMin');\n const inputs = { x: $x };\n const attrs = { axis };\n return ENGINE.runKernel(ArgMin, inputs, attrs);\n}", "docstring": "/**\n * @license\n * Copyright 2020 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-converter/dist/tf-converter.fesm.js#L12280-L12285", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "avgPool3d_", "code": "function avgPool3d_(x, filterSize, strides, pad, dimRoundingMode, dataFormat = 'NDHWC') {\n const $x = convertToTensor(x, 'x', 'avgPool3d', 'float32');\n let x5D = $x;\n let reshapedTo5D = false;\n if ($x.rank === 4) {\n reshapedTo5D = true;\n x5D = reshape($x, [1, $x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]);\n }\n assert(x5D.rank === 5, () => `Error in avgPool3d: x must be rank 5 but got rank ${x5D.rank}.`);\n assert(dataFormat === 'NDHWC', () => `Error in avgPool3d: Only NDHWC is currently supported, ` +\n `but got dataFormat of ${dataFormat}`);\n assert((typeof strides === 'number' && strides > 0) ||\n (Array.isArray(strides) && strides[0] > 0 && strides[1] > 0 &&\n strides[2] > 0), () => `Error in avgPool3d: Stride must be > 0, but got '${strides}'`);\n checkPadOnDimRoundingMode('avgPool3d', pad, dimRoundingMode);\n const inputs = { x: x5D };\n const attrs = { filterSize, strides, pad, dimRoundingMode, dataFormat };\n // tslint:disable-next-line: no-unnecessary-type-assertion\n let res = ENGINE.runKernel(AvgPool3D, inputs, attrs);\n res = cast(res, x5D.dtype);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n}", "docstring": "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-converter/dist/tf-converter.fesm.js#L12917-L12941", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "minimum_", "code": "function minimum_(a, b) {\n let $a = convertToTensor(a, 'a', 'minimum');\n let $b = convertToTensor(b, 'b', 'minimum');\n [$a, $b] = makeTypesMatch($a, $b);\n if ($a.dtype === 'bool') {\n $a = cast($a, 'int32');\n $b = cast($b, 'int32');\n }\n assertAndGetBroadcastShape($a.shape, $b.shape);\n const inputs = { a: $a, b: $b };\n return ENGINE.runKernel(Minimum, inputs);\n}", "docstring": "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-converter/dist/tf-converter.fesm.js#L18225-L18236", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "movingAverage_", "code": "function movingAverage_(v, x, decay, step, zeroDebias = true) {\n const $v = convertToTensor(v, 'v', 'movingAverage');\n const $x = convertToTensor(x, 'x', 'movingAverage');\n const $decay = convertToTensor(decay, 'decay', 'movingAverage');\n assertTypesMatch($v, $x);\n assert(arraysEqual($v.shape, $x.shape), () => 'Shape mismatch in v and x');\n const one = scalar(1);\n const oneMinusDecay = sub(one, $decay);\n let update = mul(sub($x, $v), oneMinusDecay);\n if (zeroDebias) {\n assert(step != null, () => 'When using zeroDebias: true, step is required.');\n const $step = convertToTensor(step, 'step', 'movingAverage');\n update = div(update, sub(one, pow($decay, $step)));\n }\n return add($v, update);\n}", "docstring": "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-converter/dist/tf-converter.fesm.js#L23247-L23262", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "conv3DBackpropInput_", "code": "function conv3DBackpropInput_(xShape, dy, filter, strides, pad) {\n assert(xShape.length === dy.rank, function () { return \"Length of inShape \" +\n \"(\".concat(xShape.length, \") and rank of dy (\").concat(dy.rank, \") must match\"); });\n var xShape5D = xShape;\n var dy5D = dy;\n var reshapedTo5D = false;\n if (dy.rank === 4) {\n reshapedTo5D = true;\n dy5D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2], dy.shape[3]]);\n xShape5D = [1, xShape[0], xShape[1], xShape[2], xShape[3]];\n }\n var inDepth = xShape5D[4];\n var outDepth = dy5D.shape[4];\n assert(xShape5D.length === 5, function () { return \"Error in conv3dDerInput: inShape must be length 5, but got length \" +\n \"\".concat(xShape5D.length, \".\"); });\n assert(dy5D.rank === 5, function () { return \"Error in conv3dDerInput: dy must be rank 5, but got \" +\n \"rank \".concat(dy5D.rank); });\n assert(filter.rank === 5, function () { return \"Error in conv3dDerInput: filter must be rank 5, but got \" +\n \"rank \".concat(filter.rank); });\n assert(inDepth === filter.shape[3], function () { return \"Error in conv3dDerInput: depth of input (\".concat(inDepth, \") must \") +\n \"match input depth for filter \".concat(filter.shape[3], \".\"); });\n assert(outDepth === filter.shape[4], function () { return \"Error in conv3dDerInput: depth of output (\".concat(outDepth, \") must \") +\n \"match output depth for filter \".concat(filter.shape[4], \".\"); });\n var inputs = { dy: dy5D, filter: filter };\n var attrs = { pad: pad, strides: strides, inputShape: xShape5D };\n // tslint:disable-next-line: no-unnecessary-type-assertion\n var res = ENGINE.runKernel(Conv3DBackpropInputV2, inputs, attrs);\n if (reshapedTo5D) {\n return reshape(res, [res.shape[1], res.shape[2], res.shape[3], res.shape[4]]);\n }\n return res;\n }", "docstring": "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-converter/dist/tf-converter.js#L14534-L14565", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "meshgrid", "code": "function meshgrid(x, y, _a) {\n var _b = _a === void 0 ? {} : _a, _c = _b.indexing, indexing = _c === void 0 ? 'xy' : _c;\n if (indexing !== 'xy' && indexing !== 'ij') {\n throw new TypeError(\"\".concat(indexing, \" is not a valid third argument to meshgrid\"));\n }\n if (x === undefined) {\n return [];\n }\n var $x = convertToTensor(x, 'x', 'meshgrid', x instanceof Tensor ? x.dtype : 'float32');\n if (y === undefined) {\n return [$x];\n }\n var $y = convertToTensor(y, 'y', 'meshgrid', y instanceof Tensor ? y.dtype : 'float32');\n var w = sizeFromShape($x.shape);\n var h = sizeFromShape($y.shape);\n if (indexing === 'xy') {\n $x = reshape($x, [1, -1]);\n $y = reshape($y, [-1, 1]);\n return [\n matMul$1(ones([h, 1], $x.dtype), $x),\n matMul$1($y, ones([1, w], $y.dtype)),\n ];\n }\n $x = reshape($x, [-1, 1]);\n $y = reshape($y, [1, -1]);\n return [\n matMul$1($x, ones([1, h], $x.dtype)),\n matMul$1(ones([w, 1], $y.dtype), $y),\n ];\n }", "docstring": "/**\n * @license\n * Copyright 2021 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-converter/dist/tf-converter.js#L18081-L18110", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "depthwiseConv2dNativeBackpropFilter_", "code": "function depthwiseConv2dNativeBackpropFilter_(x, dy, filterShape, strides, pad, dilations, dimRoundingMode) {\n if (dilations === void 0) { dilations = [1, 1]; }\n var x4D = x;\n if (x.rank === 3) {\n x4D = reshape(x, [1, x.shape[0], x.shape[1], x.shape[2]]);\n }\n var dy4D = dy;\n if (dy4D.rank === 3) {\n dy4D = reshape(dy, [1, dy.shape[0], dy.shape[1], dy.shape[2]]);\n }\n var inputs = { x: x4D, dy: dy4D };\n var attrs = { strides: strides, pad: pad, dimRoundingMode: dimRoundingMode, dilations: dilations, filterShape: filterShape };\n // tslint:disable-next-line: no-unnecessary-type-assertion\n return ENGINE.runKernel(DepthwiseConv2dNativeBackpropFilter, inputs, attrs);\n }", "docstring": "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-converter/dist/tf-converter.js#L23878-L23892", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IORouterRegistry.registerLoadRouter", "code": "static registerLoadRouter(loadRouter) {\n IORouterRegistry.getInstance().loadRouters.push(loadRouter);\n }", "docstring": "/**\n * Register a load-handler router.\n *\n * @param loadRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `load` method defined or `null`.\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-core/dist/tf-core.fesm.js#L6360-L6362", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "makeTensor", "code": "function makeTensor(values, shape, inferredShape, dtype) {\n if (dtype == null) {\n dtype = inferDtype(values);\n }\n 
else if (dtype === 'complex64') {\n throw new Error(\"Cannot construct a complex64 tensor directly. \" +\n \"Please use tf.complex(real, imag).\");\n }\n if (typeof values === 'object' &&\n ('texture' in values ||\n ('buffer' in values && !(values.buffer instanceof ArrayBuffer)))) {\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new Error(\"Creating tensor from GPU data only supports \" +\n \"'float32'|'int32' dtype, while the dtype is \".concat(dtype, \".\"));\n }\n return ENGINE.backend.createTensorFromGPUData(values, shape || inferredShape, dtype);\n }\n if (!isTypedArray(values) && !Array.isArray(values) &&\n typeof values !== 'number' && typeof values !== 'boolean' &&\n typeof values !== 'string') {\n throw new Error('values passed to tensor(values) must be a number/boolean/string or ' +\n 'an array of numbers/booleans/strings, or a TypedArray');\n }\n // Verify that the shape matches the inferred shape.\n if (shape != null) {\n assertNonNegativeIntegerDimensions(shape);\n var providedSize_1 = sizeFromShape(shape);\n var inferredSize_1 = sizeFromShape(inferredShape);\n assert(providedSize_1 === inferredSize_1, function () { return \"Based on the provided shape, [\".concat(shape, \"], the tensor should have \") +\n \"\".concat(providedSize_1, \" values but has \").concat(inferredSize_1); });\n for (var i = 0; i < inferredShape.length; ++i) {\n var inferred = inferredShape[i];\n var flatDimsDontMatch = i === inferredShape.length - 1 ?\n inferred !== sizeFromShape(shape.slice(i)) :\n true;\n assert(inferredShape[i] === shape[i] || !flatDimsDontMatch, function () { return \"Error creating a new Tensor. Inferred shape \" +\n \"(\".concat(inferredShape, \") does not match the provided \") +\n \"shape (\".concat(shape, \"). \"); });\n }\n }\n if (!isTypedArray(values) && !Array.isArray(values)) {\n values = [values];\n }\n shape = shape || inferredShape;\n values = dtype !== 'string' ?\n toTypedArray(values, dtype) :\n flatten(values, [], true);\n return ENGINE.makeTensor(values, shape, dtype);\n }", "docstring": "/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-core/dist/tf-core.js#L5637-L5685", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "cropAndResize_", "code": "function cropAndResize_(image, boxes, boxInd, cropSize, method, extrapolationValue) {\n if (method === void 0) { method = 'bilinear'; }\n if (extrapolationValue === void 0) { extrapolationValue = 0; }\n var $image = convertToTensor(image, 'image', 'cropAndResize');\n var $boxes = convertToTensor(boxes, 'boxes', 'cropAndResize', 'float32');\n var $boxInd = convertToTensor(boxInd, 'boxInd', 'cropAndResize', 'int32');\n var numBoxes = $boxes.shape[0];\n assert($image.rank === 4, function () { return 'Error in cropAndResize: image must be rank 4,' +\n \"but got rank \".concat($image.rank, \".\"); });\n assert($boxes.rank === 2 && $boxes.shape[1] === 4, function () { return \"Error in cropAndResize: boxes must be have size [\".concat(numBoxes, \",4] \") +\n \"but had shape \".concat($boxes.shape, \".\"); });\n assert($boxInd.rank === 1 && $boxInd.shape[0] === numBoxes, function () { return \"Error in cropAndResize: boxInd must be have size [\".concat(numBoxes, \"] \") +\n \"but had shape \".concat($boxes.shape, \".\"); });\n assert(cropSize.length === 2, function () { return \"Error in cropAndResize: cropSize must be of length 2, but got \" +\n \"length \".concat(cropSize.length, \".\"); });\n assert(cropSize[0] >= 1 && cropSize[1] >= 1, function () { return \"cropSize must be atleast [1,1], but was \".concat(cropSize); });\n assert(method === 'bilinear' || method === 'nearest', function () { return \"method must be bilinear or nearest, but was \".concat(method); });\n var inputs = { image: $image, boxes: $boxes, boxInd: $boxInd };\n var attrs = { method: method, extrapolationValue: extrapolationValue, cropSize: cropSize };\n var res = ENGINE.runKernel(CropAndResize, inputs, attrs);\n return res;\n }", "docstring": "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-core/dist/tf-core.js#L20861-L20882", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fromInt", "code": "function fromInt(value, unsigned) {\n var obj, cachedObj, cache;\n if (unsigned) {\n value >>>= 0;\n if (cache = (0 <= value && value < 256)) {\n cachedObj = UINT_CACHE[value];\n if (cachedObj)\n return cachedObj;\n }\n obj = fromBits(value, (value | 0) < 0 ? -1 : 0, true);\n if (cache)\n UINT_CACHE[value] = obj;\n return obj;\n }\n else {\n value |= 0;\n if (cache = (-128 <= value && value < 128)) {\n cachedObj = INT_CACHE[value];\n if (cachedObj)\n return cachedObj;\n }\n obj = fromBits(value, value < 0 ? -1 : 0, false);\n if (cache)\n INT_CACHE[value] = obj;\n return obj;\n }\n }", "docstring": "/**\n * @param {number} value\n * @param {boolean=} unsigned\n * @returns {!Long}\n * @inner\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs-layers/dist/tf-layers.js#L5451-L5477", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "softmaxCrossEntropyWithLogits_", "code": "function softmaxCrossEntropyWithLogits_(labels, logits, dim = -1) {\n if (dim === -1) {\n dim = logits.rank - 1;\n }\n if (dim !== logits.rank - 1) {\n throw Error(`Softmax cross entropy along a non-last dimension is not yet ` +\n `supported. Labels / logits was rank ${logits.rank} ` +\n `and dim was ${dim}`);\n }\n // Use a custom gradient for numerical stability.\n const customOp = customGrad((labels, logits, save) => {\n // Reference:\n // 1. http://cs231n.github.io/linear-classify/#softmax\n // 2. https://blog.feedly.com/tricks-of-the-trade-logsumexp/\n const keepDims = true;\n const lse = logSumExp(logits, [dim], keepDims);\n const logResult = sub(cast(logits, 'float32'), lse);\n save([labels, logResult]);\n const costVector = neg(mul(logResult, labels));\n const value = sum$1(costVector, [dim]);\n const gradFunc = (dy, saved) => {\n const [labels, logResult] = saved;\n const dyShape = expandShapeToKeepDim(dy.shape, [dim]);\n return [\n mul(reshape(dy, dyShape), sub(cast(labels, 'float32'), exp(logResult))),\n mul(reshape(dy, dyShape), sub(exp(logResult), cast(labels, 'float32'))),\n ];\n };\n return { value, gradFunc };\n });\n return customOp(labels, logits);\n }", "docstring": "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs/dist/tf.es2017.js#L22957-L22988", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "simpleRNN", "code": "function simpleRNN(args) {\n return new SimpleRNN(args);\n }", "docstring": "/**\n * Fully-connected RNN where the output is to be fed back to input.\n *\n * This is an `RNN` layer consisting of one `SimpleRNNCell`. However, unlike\n * the underlying `SimpleRNNCell`, the `apply` method of `SimpleRNN` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. For example:\n *\n * ```js\n * const rnn = tf.layers.simpleRNN({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `SimpleRNNCell`'s number of units.\n * ```\n *\n * @doc {heading: 'Layers', subheading: 'Recurrent', namespace: 'layers'}\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs/dist/tf.es2017.js#L53836-L53838", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "binaryCrossentropy$2", "code": "function binaryCrossentropy$2(yTrue, yPred) {\n return binaryCrossentropy$1(yTrue, yPred);\n }", "docstring": "/**\n * Binary crossentropy metric function.\n *\n * Example:\n * ```js\n * const x = tf.tensor2d([[0], [1], [1], [1]]);\n * const y = tf.tensor2d([[0], [0], [0.5], [1]]);\n * const crossentropy = tf.metrics.binaryCrossentropy(x, y);\n * crossentropy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth.\n * @param yPred Binary Tensor of prediction, probabilities for the `1` case.\n * @return Accuracy Tensor.\n *\n * @doc {heading: 'Metrics', namespace: 'metrics'}\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs/dist/tf.es2017.js#L54464-L54466", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "oneHot$2", "code": "function oneHot$2(args) {\n const { inputs, backend, attrs } = args;\n const { indices } = inputs;\n const { dtype, depth, onValue, offValue } = attrs;\n assertNotComplex(indices, 'oneHot');\n const indicesSize = sizeFromShape(indices.shape);\n const res = 
new Float32Array(indicesSize * depth);\n res.fill(offValue);\n const indicesVal = backend.data.get(indices.dataId).values;\n for (let event = 0; event < indicesSize; ++event) {\n if (indicesVal[event] >= 0 && indicesVal[event] < depth) {\n res[event * depth + indicesVal[event]] = onValue;\n }\n }\n return backend.makeTensorInfo([...indices.shape, depth], dtype, res);\n }", "docstring": "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs/dist/tf.es2017.js#L78823-L78838", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "transform$2", "code": "function transform$2(args) {\n const { inputs, backend, attrs } = args;\n const { image, transforms } = inputs;\n const { interpolation, fillMode, fillValue, outputShape } = attrs;\n const [batch, imageHeight, imageWidth, numChannels] = image.shape;\n const [outHeight, outWidth] = outputShape != null ? outputShape : [imageHeight, imageWidth];\n const outShape = [batch, outHeight, outWidth,\n numChannels];\n const program = new TransformProgram(imageHeight, imageWidth, interpolation, fillMode, fillValue, outShape);\n return backend.runWebGLProgram(program, [image, transforms], 'float32');\n }", "docstring": "/**\n * @license\n * Copyright 2021 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs/dist/tf.es2017.js#L101389-L101399", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "bandPart_", "code": "function bandPart_(a, numLower, numUpper) {\n assert(numLower % 1 === 0, () => `bandPart(): numLower must be an integer, got ${numLower}.`);\n assert(numUpper % 1 === 0, () => `bandPart(): numUpper must be an integer, got ${numUpper}.`);\n const $a = convertToTensor(a, 'a', 'bandPart');\n assert($a.rank >= 2, () => `bandPart(): Rank must be at least 2, got ${$a.rank}.`);\n const shape = $a.shape;\n const [M, N] = $a.shape.slice(-2);\n if (!(numLower <= M)) {\n throw new Error(`bandPart(): numLower (${numLower})` +\n ` must not be greater than the number of rows (${M}).`);\n }\n if (!(numUpper <= N)) {\n throw new Error(`bandPart(): numUpper (${numUpper})` +\n ` must not be greater than the number of columns (${N}).`);\n }\n if (numLower < 0) {\n numLower = M;\n }\n if (numUpper < 0) {\n numUpper = N;\n }\n const i = reshape(range(0, M, 1, 'int32'), [-1, 1]);\n const j = range(0, N, 1, 'int32');\n const ij = sub(i, j);\n const inBand = logicalAnd(lessEqual(ij, scalar(+numLower, 'int32')), greaterEqual(ij, scalar(-numUpper, 'int32')));\n const zero = zeros([M, N], $a.dtype);\n return reshape(stack(unstack(reshape($a, [-1, M, N]))\n .map(mat => where(inBand, mat, zero))), shape);\n}", "docstring": "/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs/dist/tf.fesm.js#L22236-L22264", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "standardizeSampleOrClassWeights", "code": "function standardizeSampleOrClassWeights(xWeight, outputNames, weightType) {\n const numOutputs = outputNames.length;\n if (xWeight == null || (Array.isArray(xWeight) && xWeight.length === 0)) {\n return outputNames.map(name => null);\n }\n if (numOutputs === 1) {\n if (Array.isArray(xWeight) && xWeight.length === 1) {\n return xWeight;\n }\n else if (typeof xWeight === 'object' && outputNames[0] in xWeight) {\n return [xWeight[outputNames[0]]];\n }\n else {\n return [xWeight];\n }\n }\n if (Array.isArray(xWeight)) {\n if (xWeight.length !== numOutputs) {\n throw new Error(`Provided ${weightType} is an array of ${xWeight.length} ` +\n `element(s), but the model has ${numOutputs} outputs. ` +\n `Make sure a set of weights is provided for each model output.`);\n }\n return xWeight;\n }\n else if (typeof xWeight === 'object' && Object.keys(xWeight).length > 0 &&\n typeof xWeight[Object.keys(xWeight)[0]] ===\n 'object') {\n const output = [];\n outputNames.forEach(outputName => {\n if (outputName in xWeight) {\n output.push(xWeight[outputName]);\n }\n else {\n output.push(null);\n }\n });\n return output;\n }\n else {\n throw new Error(`The model has multiple (${numOutputs}) outputs, ` +\n `so ${weightType} must be either an array with ` +\n `${numOutputs} elements or an object with ${outputNames} keys. 
` +\n `Provided ${weightType} not understood: ${JSON.stringify(xWeight)}`);\n }\n}", "docstring": "/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs/dist/tf.fesm.js#L42060-L42104", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "gatherV2Impl", "code": "function gatherV2Impl(xBuf, indicesBuf, flattenOutputShape) {\n const outBuf = buffer(flattenOutputShape, xBuf.dtype);\n for (let i = 0; i < outBuf.size; ++i) {\n const newLoc = outBuf.indexToLoc(i);\n const originalLoc = newLoc.slice();\n const batchIdx = originalLoc[0];\n const indicesIdx = originalLoc[2];\n const indicesIndex = indicesBuf.locToIndex([batchIdx, indicesIdx]);\n originalLoc[2] = indicesBuf.values[indicesIndex];\n const originalIndex = xBuf.locToIndex(originalLoc);\n if (0 <= originalIndex && originalIndex < xBuf.values.length) {\n outBuf.values[i] = xBuf.values[originalIndex];\n } // Else, index is out of bounds, so leave the default zero val in outBuf.\n }\n return outBuf;\n}", "docstring": "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs/dist/tf.fesm.js#L71298-L71313", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getEinsumPermutation", "code": "function getEinsumPermutation(nDims, idDims) {\n\t var permutationIndices = new Array(nDims);\n\t permutationIndices.fill(-1);\n\n\t for (var i = 0; i < idDims.length; ++i) {\n\t permutationIndices[idDims[i]] = i;\n\t }\n\n\t var expandDims = [];\n\n\t for (var _i4 = 0; _i4 < nDims; ++_i4) {\n\t if (permutationIndices[_i4] === -1) {\n\t expandDims.push(_i4);\n\t }\n\t }\n\n\t permutationIndices = permutationIndices.filter(function (d) {\n\t return d !== -1;\n\t });\n\t return {\n\t permutationIndices: permutationIndices,\n\t expandDims: expandDims\n\t };\n\t}", "docstring": "/**\n\t * Get the permutation for a given input tensor.\n\t *\n\t * @param nDims Total number of dimension of all tensors involved in the einsum\n\t * operation.\n\t * @param idDims Dimension indices involve in the tensor in question.\n\t * @returns An object consisting of the following fields:\n\t * - permutationIndices: Indices to permute the axes of the tensor with.\n\t * - expandDims: Indices to the dimension that need to be expanded from the\n\t * tensor after permutation.\n\t */", "url": 
"https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs/dist/tf.js#L48675-L48698", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "truncatedNormal_", "code": "function truncatedNormal_(shape, mean = 0, stdDev = 1, dtype, seed) {\n assertNonNegativeIntegerDimensions(shape);\n if (dtype != null && dtype === 'bool') {\n throw new Error(`Unsupported data type $ { dtype }`);\n }\n const randGauss = new MPRandGauss(mean, stdDev, dtype, true /* truncated */, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = randGauss.nextValue();\n }\n return res.toTensor();\n}", "docstring": "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs/dist/tf.node.js#L22415-L22426", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OptimizerConstructors.adadelta", "code": "static adadelta(learningRate = .001, rho = .95, epsilon = null) {\n return new AdadeltaOptimizer(learningRate, rho, epsilon);\n }", "docstring": "/**\n * Constructs a `tf.AdadeltaOptimizer` that uses the Adadelta algorithm.\n * See [https://arxiv.org/abs/1212.5701](https://arxiv.org/abs/1212.5701)\n *\n * @param learningRate The learning rate to use for the Adadelta gradient\n * descent algorithm.\n * @param rho The learning rate decay over each update.\n * @param epsilon A constant epsilon used to better condition the grad\n * update.\n *\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs/dist/tf.node.js#L27574-L27576", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getComplexWithIndex", "code": "function getComplexWithIndex(complex, index) {\n const real = complex[index * 2];\n const imag = complex[index * 2 + 1];\n return { real, imag };\n}", "docstring": "/**\n * Get the map representing a complex value in the given array.\n * @param complex The complex tensor values.\n * @param index An index of the target complex value.\n */", "url": "https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/@tensorflow/tfjs/dist/tf.node.js#L28174-L28178", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Toast.Default", "code": "static get Default() {\n return Default;\n }", "docstring": "// Getters", "url": 
"https://github.com/wangjia184/diffusion_model/blob/5ea111bd405c1f893976808b768681d61e11c979/docs/bootstrap/dist/js/bootstrap.esm.js#L5031-L5033", "sha": "5ea111bd405c1f893976808b768681d61e11c979"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "createCache", "code": "function createCache() {\n\tvar keys = [];\n\n\tfunction cache( key, value ) {\n\n\t\t// Use (key + \" \") to avoid collision with native prototype properties (see Issue #157)\n\t\tif ( keys.push( key + \" \" ) > Expr.cacheLength ) {\n\n\t\t\t// Only keep the most recent entries\n\t\t\tdelete cache[ keys.shift() ];\n\t\t}\n\t\treturn ( cache[ key + \" \" ] = value );\n\t}\n\treturn cache;\n}", "docstring": "/**\n * Create key-value caches of limited size\n * @returns {function(string, object)} Returns the Object data after storing it on itself with\n *\tproperty name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength)\n *\tdeleting the oldest entry\n */", "url": "https://github.com/PacktPublishing/Software-Architecture-with-C-Sharp-12-and-.NET-8-4E/blob/bb5f06a93c7a6ca281f88b0e3e16fb1302ebf943/ch11/MvcDockerTest/MvcDockerTest/wwwroot/lib/jquery/dist/jquery.js#L907-L921", "sha": "bb5f06a93c7a6ca281f88b0e3e16fb1302ebf943"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "findInsertionIndex", "code": "function findInsertionIndex(id) {\n // the start index should be `flushIndex + 1`\n let start = flushIndex + 1;\n let end = queue.length;\n while (start < end) {\n const middle = (start + end) >>> 1;\n const middleJobId = getId(queue[middle]);\n middleJobId < id ? (start = middle + 1) : (end = middle);\n }\n return start;\n}", "docstring": "// #2768", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/@vue/runtime-core/dist/runtime-core.esm-bundler.js#L266-L276", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getTargetAttrs", "code": "function getTargetAttrs(tagName) {\n if (attributes.cache[tagName]) {\n return attributes.cache[tagName]\n }\n /** @type {string[]} */\n const result = []\n if (attributes.names[tagName]) {\n result.push(...attributes.names[tagName])\n }\n for (const { name, attrs } of attributes.regexps) {\n name.lastIndex = 0\n if (name.test(tagName)) {\n result.push(...attrs)\n }\n }\n if (casing.isKebabCase(tagName)) {\n result.push(...getTargetAttrs(casing.pascalCase(tagName)))\n }\n\n return (attributes.cache[tagName] = new Set(result))\n }", "docstring": "/**\n * Get the attribute to be verified from the element name.\n * @param {string} tagName\n * @returns {Set}\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/eslint-plugin-vue/lib/rules/no-bare-strings-in-template.js#L176-L196", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isAwaitedPromise", "code": "function isAwaitedPromise(callExpression) {\n if (callExpression.parent.type === 'AwaitExpression') {\n // cases like `await nextTick()`\n return true\n }\n\n if (callExpression.parent.type === 'ReturnStatement') {\n // cases like `return nextTick()`\n return true\n }\n if (\n callExpression.parent.type === 'ArrowFunctionExpression' &&\n callExpression.parent.body === callExpression\n ) {\n // cases like `() => nextTick()`\n return true\n }\n\n if (\n 
callExpression.parent.type === 'MemberExpression' &&\n callExpression.parent.property.type === 'Identifier' &&\n callExpression.parent.property.name === 'then'\n ) {\n // cases like `nextTick().then()`\n return true\n }\n\n if (\n callExpression.parent.type === 'VariableDeclarator' ||\n callExpression.parent.type === 'AssignmentExpression'\n ) {\n // cases like `let foo = nextTick()` or `foo = nextTick()`\n return true\n }\n\n if (\n callExpression.parent.type === 'ArrayExpression' &&\n callExpression.parent.parent.type === 'CallExpression' &&\n callExpression.parent.parent.callee.type === 'MemberExpression' &&\n callExpression.parent.parent.callee.object.type === 'Identifier' &&\n callExpression.parent.parent.callee.object.name === 'Promise' &&\n callExpression.parent.parent.callee.property.type === 'Identifier'\n ) {\n // cases like `Promise.all([nextTick()])`\n return true\n }\n\n return false\n}", "docstring": "/**\n * @param {CallExpression} callExpression\n * @returns {boolean}\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/eslint-plugin-vue/lib/rules/valid-next-tick.js#L61-L109", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "buildVElementMatcher", "code": "function buildVElementMatcher(selectorValue, test) {\n const val = selector.insensitive\n ? selectorValue.toLowerCase()\n : selectorValue\n return (element) => {\n const attrValue = getAttributeValue(element, key)\n if (attrValue == null) {\n return false\n }\n return test(\n selector.insensitive ? attrValue.toLowerCase() : attrValue,\n val\n )\n }\n }", "docstring": "/**\n * @param {string} selectorValue\n * @param {(attrValue:string, selectorValue: string)=>boolean} test\n * @returns {VElementMatcher}\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/eslint-plugin-vue/lib/utils/selector.js#L298-L312", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FlatESLint.constructor", "code": "constructor(options = {}) {\n\n const defaultConfigs = [];\n const processedOptions = processOptions(options);\n const linter = new Linter({\n cwd: processedOptions.cwd,\n configType: \"flat\"\n });\n\n const cacheFilePath = getCacheFile(\n processedOptions.cacheLocation,\n processedOptions.cwd\n );\n\n const lintResultCache = processedOptions.cache\n ? 
new LintResultCache(cacheFilePath, processedOptions.cacheStrategy)\n : null;\n\n privateMembers.set(this, {\n options: processedOptions,\n linter,\n cacheFilePath,\n lintResultCache,\n defaultConfigs,\n defaultIgnores: () => false,\n configs: null\n });\n\n /**\n * If additional plugins are passed in, add that to the default\n * configs for this instance.\n */\n if (options.plugins) {\n\n const plugins = {};\n\n for (const [pluginName, plugin] of Object.entries(options.plugins)) {\n plugins[naming.getShorthandName(pluginName, \"eslint-plugin\")] = plugin;\n }\n\n defaultConfigs.push({\n plugins\n });\n }\n\n }", "docstring": "/**\n * Creates a new instance of the main ESLint API.\n * @param {FlatESLintOptions} options The options for this instance.\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/eslint/lib/eslint/flat-eslint.js#L589-L634", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "reportAccessingEval", "code": "function reportAccessingEval(globalScope) {\n const variable = astUtils.getVariableByName(globalScope, \"eval\");\n\n if (!variable) {\n return;\n }\n\n const references = variable.references;\n\n for (let i = 0; i < references.length; ++i) {\n const reference = references[i];\n const id = reference.identifier;\n\n if (id.name === \"eval\" && !astUtils.isCallee(id)) {\n\n // Is accessing to eval (excludes direct calls to eval)\n report(id);\n }\n }\n }", "docstring": "/**\n * Reports all accesses of `eval` (excludes direct calls to eval).\n * @param {eslint-scope.Scope} globalScope The global scope.\n * @returns {void}\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/eslint/lib/rules/no-eval.js#L174-L193", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "markReturnStatementsOnSegmentAsUsed", "code": "function markReturnStatementsOnSegmentAsUsed(segment) {\n if (!segment.reachable) {\n usedUnreachableSegments.add(segment);\n segment.allPrevSegments\n .filter(isReturned)\n .filter(prevSegment => !usedUnreachableSegments.has(prevSegment))\n .forEach(markReturnStatementsOnSegmentAsUsed);\n return;\n }\n\n const info = segmentInfoMap.get(segment);\n\n for (const node of info.uselessReturns) {\n remove(scopeInfo.uselessReturns, node);\n }\n info.uselessReturns = [];\n }", "docstring": "/**\n * Removes the return statements on the given segment from the useless return\n * statement list.\n *\n * This segment may be an unreachable segment.\n * In that case, the information object of the unreachable segment is not\n * initialized because `onCodePathSegmentStart` event is not notified for\n * unreachable segments.\n * This goes to the previous segments of the unreachable segment recursively\n * if the unreachable segment was generated by a return statement. 
Otherwise,\n * this ignores the unreachable segment.\n *\n * This behavior would simulate code paths for the case that the return\n * statement does not exist.\n * @param {CodePathSegment} segment The segment to get return statements.\n * @returns {void}\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/eslint/lib/rules/no-useless-return.js#L157-L173", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "startsWithUpperCase", "code": "function startsWithUpperCase(s) {\n return s[0] !== s[0].toLocaleLowerCase();\n}", "docstring": "/**\n * Checks whether the given string starts with uppercase or not.\n * @param {string} s The string to check.\n * @returns {boolean} `true` if the string starts with uppercase.\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/eslint/lib/rules/utils/ast-utils.js#L79-L81", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isClosingParenToken", "code": "function isClosingParenToken(token) {\n return token.value === \")\" && token.type === \"Punctuator\";\n}", "docstring": "/**\n * Checks if the given token is a closing parenthesis token or not.\n * @param {Token} token The token to check.\n * @returns {boolean} `true` if the token is a closing parenthesis token.\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/eslint/lib/rules/utils/ast-utils.js#L599-L601", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isArrayLike", "code": "function isArrayLike(value) {\n return value != null && isLength(value.length) && !isFunction(value);\n}", "docstring": "/**\n * Checks if `value` is array-like. 
A value is considered array-like if it's\n * not a function and has a `value.length` that's an integer greater than or\n * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`.\n *\n * @static\n * @memberOf _\n * @since 4.0.0\n * @category Lang\n * @param {*} value The value to check.\n * @returns {boolean} Returns `true` if `value` is array-like, else `false`.\n * @example\n *\n * _.isArrayLike([1, 2, 3]);\n * // => true\n *\n * _.isArrayLike(document.body.children);\n * // => true\n *\n * _.isArrayLike('abc');\n * // => true\n *\n * _.isArrayLike(_.noop);\n * // => false\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/lodash.merge/index.js#L1597-L1599", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "objectToString", "code": "function objectToString(value) {\n return nativeObjectToString.call(value);\n}", "docstring": "/**\n * Converts `value` to a string using `Object.prototype.toString`.\n *\n * @private\n * @param {*} value The value to convert.\n * @returns {string} Returns the converted string.\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/lodash/_objectToString.js#L18-L20", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "dropRightWhile", "code": "function dropRightWhile(array, predicate) {\n return (array && array.length)\n ? baseWhile(array, getIteratee(predicate, 3), true, true)\n : [];\n }", "docstring": "/**\n * Creates a slice of `array` excluding elements dropped from the end.\n * Elements are dropped until `predicate` returns falsey. The predicate is\n * invoked with three arguments: (value, index, array).\n *\n * @static\n * @memberOf _\n * @since 3.0.0\n * @category Array\n * @param {Array} array The array to query.\n * @param {Function} [predicate=_.identity] The function invoked per iteration.\n * @returns {Array} Returns the slice of `array`.\n * @example\n *\n * var users = [\n * { 'user': 'barney', 'active': true },\n * { 'user': 'fred', 'active': false },\n * { 'user': 'pebbles', 'active': false }\n * ];\n *\n * _.dropRightWhile(users, function(o) { return !o.active; });\n * // => objects for ['barney']\n *\n * // The `_.matches` iteratee shorthand.\n * _.dropRightWhile(users, { 'user': 'pebbles', 'active': false });\n * // => objects for ['barney', 'fred']\n *\n * // The `_.matchesProperty` iteratee shorthand.\n * _.dropRightWhile(users, ['active', false]);\n * // => objects for ['barney']\n *\n * // The `_.property` iteratee shorthand.\n * _.dropRightWhile(users, 'active');\n * // => objects for ['barney', 'fred', 'pebbles']\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/lodash/lodash.js#L7190-L7194", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "browserConvert", "code": "function browserConvert(lodash, options) {\n return baseConvert(lodash, lodash, options);\n}", "docstring": "/**\n * Converts `lodash` to an immutable auto-curried iteratee-first data-last\n * version with conversion `options` applied.\n *\n * @param {Function} lodash The lodash function to convert.\n * @param {Object} [options] The options object. 
See `baseConvert` for more details.\n * @returns {Function} Returns the converted `lodash`.\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/lodash/fp/_convertBrowser.js#L11-L13", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Chunk.preRender", "code": "preRender(options, inputBase, snippets) {\n const { _, getPropertyAccess, n } = snippets;\n const magicString = new Bundle$1({ separator: `${n}${n}` });\n this.usedModules = [];\n this.indentString = getIndentString(this.orderedModules, options);\n const renderOptions = {\n dynamicImportFunction: options.dynamicImportFunction,\n exportNamesByVariable: this.exportNamesByVariable,\n format: options.format,\n freeze: options.freeze,\n indent: this.indentString,\n namespaceToStringTag: options.namespaceToStringTag,\n outputPluginDriver: this.pluginDriver,\n snippets\n };\n // for static and dynamic entry points, inline the execution list to avoid loading latency\n if (options.hoistTransitiveImports &&\n !this.outputOptions.preserveModules &&\n this.facadeModule !== null) {\n for (const dep of this.dependencies) {\n if (dep instanceof Chunk)\n this.inlineChunkDependencies(dep);\n }\n }\n this.prepareModulesForRendering(snippets);\n this.setIdentifierRenderResolutions(options);\n let hoistedSource = '';\n const renderedModules = this.renderedModules;\n for (const module of this.orderedModules) {\n let renderedLength = 0;\n if (module.isIncluded() || this.includedNamespaces.has(module)) {\n const source = module.render(renderOptions).trim();\n renderedLength = source.length();\n if (renderedLength) {\n if (options.compact && source.lastLine().includes('//'))\n source.append('\\n');\n this.renderedModuleSources.set(module, source);\n magicString.addSource(source);\n this.usedModules.push(module);\n }\n const namespace = module.namespace;\n if (this.includedNamespaces.has(module) && !this.outputOptions.preserveModules) {\n const rendered = namespace.renderBlock(renderOptions);\n if (namespace.renderFirst())\n hoistedSource += n + rendered;\n else\n magicString.addSource(new MagicString(rendered));\n }\n }\n const { renderedExports, removedExports } = module.getRenderedExports();\n const { renderedModuleSources } = this;\n renderedModules[module.id] = {\n get code() {\n var _a, _b;\n return (_b = (_a = renderedModuleSources.get(module)) === null || _a === void 0 ? void 0 : _a.toString()) !== null && _b !== void 0 ? _b : null;\n },\n originalLength: module.originalCode.length,\n removedExports,\n renderedExports,\n renderedLength\n };\n }\n if (hoistedSource)\n magicString.prepend(hoistedSource + n + n);\n if (this.needsExportsShim) {\n magicString.prepend(`${n}${snippets.cnst} ${MISSING_EXPORT_SHIM_VARIABLE}${_}=${_}void 0;${n}${n}`);\n }\n if (options.compact) {\n this.renderedSource = magicString;\n }\n else {\n this.renderedSource = magicString.trim();\n }\n this.renderedHash = undefined;\n if (this.isEmpty && this.getExportNames().length === 0 && this.dependencies.size === 0) {\n const chunkName = this.getChunkName();\n this.inputOptions.onwarn({\n chunkName,\n code: 'EMPTY_BUNDLE',\n message: `Generated an empty chunk: \"${chunkName}\"`\n });\n }\n this.setExternalRenderPaths(options, inputBase);\n this.renderedDependencies = this.getChunkDependencyDeclarations(options, getPropertyAccess);\n this.renderedExports =\n this.exportMode === 'none'\n ? 
[]\n : this.getChunkExportDeclarations(options.format, getPropertyAccess);\n }", "docstring": "// prerender allows chunk hashes and names to be generated before finalizing", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/rollup/dist/shared/rollup.js#L14880-L14968", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "generatedPositionAfter", "code": "function generatedPositionAfter(mappingA, mappingB) {\n // Optimized for most common case\n var lineA = mappingA.generatedLine;\n var lineB = mappingB.generatedLine;\n var columnA = mappingA.generatedColumn;\n var columnB = mappingB.generatedColumn;\n return lineB > lineA || lineB == lineA && columnB >= columnA ||\n util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;\n}", "docstring": "/**\n * Determine whether mappingB is after mappingA with respect to generated\n * position.\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/source-map/lib/mapping-list.js#L14-L22", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "visit", "code": "function visit(cst, visitor) {\n if ('type' in cst && cst.type === 'document')\n cst = { start: cst.start, value: cst.value };\n _visit(Object.freeze([]), cst, visitor);\n}", "docstring": "/**\n * Apply a visitor to a CST document or item.\n *\n * Walks through the tree (depth-first) starting from the root, calling a\n * `visitor` function with two arguments when entering each item:\n * - `item`: The current item, which included the following members:\n * - `start: SourceToken[]` – Source tokens before the key or value,\n * possibly including its anchor or tag.\n * - `key?: Token | null` – Set for pair values. May then be `null`, if\n * the key before the `:` separator is empty.\n * - `sep?: SourceToken[]` – Source tokens between the key and the value,\n * which should include the `:` map value indicator if `value` is set.\n * - `value?: Token` – The value of a sequence item, or of a map pair.\n * - `path`: The steps from the root to the current node, as an array of\n * `['key' | 'value', number]` tuples.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this token, continue with\n * next sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current item, then continue with the next one\n * - `number`: Set the index of the next step. This is useful especially if\n * the index of the current token has changed.\n * - `function`: Define the next visitor for this item. 
After the original\n * visitor is called on item entry, next visitors are called after handling\n * a non-empty `key` and when exiting the item.\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/vite/dist/node/chunks/dep-5605cfa4.js#L23806-L23810", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "VueElement._resolveDef", "code": "_resolveDef() {\n this._resolved = true;\n // set initial attrs\n for (let i = 0; i < this.attributes.length; i++) {\n this._setAttr(this.attributes[i].name);\n }\n // watch future attr changes\n new MutationObserver(mutations => {\n for (const m of mutations) {\n this._setAttr(m.attributeName);\n }\n }).observe(this, { attributes: true });\n const resolve = (def, isAsync = false) => {\n const { props, styles } = def;\n // cast Number-type props set before resolve\n let numberProps;\n if (props && !isArray(props)) {\n for (const key in props) {\n const opt = props[key];\n if (opt === Number || (opt && opt.type === Number)) {\n if (key in this._props) {\n this._props[key] = toNumber(this._props[key]);\n }\n (numberProps || (numberProps = Object.create(null)))[camelize(key)] = true;\n }\n }\n }\n this._numberProps = numberProps;\n if (isAsync) {\n // defining getter/setters on prototype\n // for sync defs, this already happened in the constructor\n this._resolveProps(def);\n }\n // apply CSS\n this._applyStyles(styles);\n // initial render\n this._update();\n };\n const asyncDef = this._def.__asyncLoader;\n if (asyncDef) {\n asyncDef().then(def => resolve(def, true));\n }\n else {\n resolve(this._def);\n }\n }", "docstring": "/**\n * resolve inner component definition (handle possible async component)\n */", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/vue/dist/vue.esm-browser.js#L9958-L10003", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "useHydration", "code": "function useHydration() {\n if (!IN_BROWSER) return ref(false);\n const {\n ssr\n } = useDisplay();\n if (ssr) {\n const isMounted = ref(false);\n onMounted(() => {\n isMounted.value = true;\n });\n return isMounted;\n } else {\n return ref(true);\n }\n}", "docstring": "// Utilities", "url": "https://github.com/MrPeterJin/researchgpt/blob/722c28ee04655cc4359c036f5186ae6b478110ee/frontend/node_modules/vuetify/dist/vuetify.esm.js#L9221-L9235", "sha": "722c28ee04655cc4359c036f5186ae6b478110ee"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ReadableStreamDefaultReader.read", "code": "read() {\n if (!IsReadableStreamDefaultReader(this)) {\n return promiseRejectedWith(defaultReaderBrandCheckException('read'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('read from'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readRequest = {\n _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),\n _closeSteps: () => resolvePromise({ value: undefined, done: true }),\n _errorSteps: e => rejectPromise(e)\n };\n ReadableStreamDefaultReaderRead(this, readRequest);\n return promise;\n }", "docstring": "/**\n * Returns a promise that allows access to the next chunk from the stream's 
internal queue, if available.\n *\n * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.\n */", "url": "https://github.com/coderabbitai/ai-pr-reviewer/blob/d5ec3970b3acc4b9d673e6cd601bf4d3cf043b55/dist/index.js#L30824-L30844", "sha": "d5ec3970b3acc4b9d673e6cd601bf4d3cf043b55"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "backup", "code": "async function backup(refLog) {\n const config = await new configClass().getConfig();\n\n if (config.error) {\n refLog.logData.push({ color: \"red\", Message: \"Backup Failed: Failed to get config\" });\n refLog.logData.push({ color: \"red\", Message: \"Backup Failed with errors\" });\n Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);\n return;\n }\n\n refLog.logData.push({ color: \"lawngreen\", Message: \"Starting Backup\" });\n const pool = new Pool({\n user: postgresUser,\n password: postgresPassword,\n host: postgresIp,\n port: postgresPort,\n database: postgresDatabase,\n });\n\n // Get data from each table and append it to the backup file\n\n try {\n let now = moment();\n const backuppath = \"./\" + backupfolder;\n\n if (!fs.existsSync(backuppath)) {\n fs.mkdirSync(backuppath);\n console.log(\"Directory created successfully!\");\n }\n if (!checkFolderWritePermission(backuppath)) {\n console.error(\"No write permissions for the folder:\", backuppath);\n refLog.logData.push({ color: \"red\", Message: \"Backup Failed: No write permissions for the folder: \" + backuppath });\n refLog.logData.push({ color: \"red\", Message: \"Backup Failed with errors\" });\n Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);\n await pool.end();\n return;\n }\n\n const ExcludedTables = config.settings?.ExcludedTables || [];\n\n let filteredTables = tables.filter((table) => !ExcludedTables.includes(table.value));\n\n if (filteredTables.length === 0) {\n refLog.logData.push({ color: \"red\", Message: \"Backup Failed: No tables to backup\" });\n refLog.logData.push({ color: \"red\", Message: \"Backup Failed with errors\" });\n Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);\n await pool.end();\n return;\n }\n\n // const backupPath = `../backup-data/backup_${now.format('yyyy-MM-DD HH-mm-ss')}.json`;\n const directoryPath = path.join(__dirname, \"..\", backupfolder, `backup_${now.format(\"yyyy-MM-DD HH-mm-ss\")}.json`);\n refLog.logData.push({ color: \"yellow\", Message: \"Begin Backup \" + directoryPath });\n const stream = fs.createWriteStream(directoryPath, { flags: \"a\" });\n stream.on(\"error\", (error) => {\n refLog.logData.push({ color: \"red\", Message: \"Backup Failed: \" + error });\n Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);\n return;\n });\n const backup_data = [];\n\n for (let table of filteredTables) {\n const query = `SELECT * FROM ${table.value}`;\n\n const { rows } = await pool.query(query);\n refLog.logData.push({ color: \"dodgerblue\", Message: `Saving ${rows.length} rows for table ${table.value}` });\n\n backup_data.push({ [table.value]: rows });\n }\n\n await stream.write(JSON.stringify(backup_data));\n stream.end();\n refLog.logData.push({ color: \"lawngreen\", Message: \"Backup Complete\" });\n refLog.logData.push({ color: \"dodgerblue\", Message: \"Removing old backups\" });\n\n //Cleanup excess backups\n let deleteCount = 0;\n const directoryPathDelete = path.join(__dirname, \"..\", backupfolder);\n\n const files = await new Promise((resolve, reject) => {\n 
fs.readdir(directoryPathDelete, (err, files) => {\n if (err) {\n reject(err);\n } else {\n resolve(files);\n }\n });\n });\n\n let fileData = files\n .filter((file) => file.endsWith(\".json\"))\n .map((file) => {\n const filePath = path.join(directoryPathDelete, file);\n const stats = fs.statSync(filePath);\n return {\n name: file,\n size: stats.size,\n datecreated: stats.birthtime,\n };\n });\n\n fileData = fileData.sort((a, b) => new Date(b.datecreated) - new Date(a.datecreated)).slice(5);\n\n for (var oldBackup of fileData) {\n const oldBackupFile = path.join(__dirname, \"..\", backupfolder, oldBackup.name);\n\n await new Promise((resolve, reject) => {\n fs.unlink(oldBackupFile, (err) => {\n if (err) {\n reject(err);\n } else {\n resolve();\n }\n });\n });\n\n deleteCount += 1;\n refLog.logData.push({ color: \"yellow\", Message: `${oldBackupFile} has been deleted.` });\n }\n\n refLog.logData.push({ color: \"lawngreen\", Message: deleteCount + \" backups removed.\" });\n } catch (error) {\n console.log(error);\n refLog.logData.push({ color: \"red\", Message: \"Backup Failed: \" + error });\n Logging.updateLog(refLog.uuid, refLog.logData, taskstate.FAILED);\n }\n\n await pool.end();\n}", "docstring": "// Backup function", "url": "https://github.com/CyferShepard/Jellystat/blob/99a8c49e6e226636274d3be88f42622aa8e52880/backend/classes/backup.js#L31-L159", "sha": "99a8c49e6e226636274d3be88f42622aa8e52880"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WritableStreamDefaultWriterAbort", "code": "function WritableStreamDefaultWriterAbort(writer, reason) {\n const stream = writer._ownerWritableStream;\n return WritableStreamAbort(stream, reason);\n }", "docstring": "// A client of WritableStreamDefaultWriter may use these functions directly to bypass state check.", "url": "https://github.com/elementdavv/internet_archive_downloader/blob/c81f05f33498dbb771570cd22a2bca6fd779ebfd/moz/js/utils/ponyfill.es6.js#L2143-L2146", "sha": "c81f05f33498dbb771570cd22a2bca6fd779ebfd"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "computeStyleTests", "code": "function computeStyleTests() {\n\n\t\t// This is a singleton, we need to execute it only once\n\t\tif ( !div ) {\n\t\t\treturn;\n\t\t}\n\n\t\tcontainer.style.cssText = \"position:absolute;left:-11111px;width:60px;\" +\n\t\t\t\"margin-top:1px;padding:0;border:0\";\n\t\tdiv.style.cssText =\n\t\t\t\"position:relative;display:block;box-sizing:border-box;overflow:scroll;\" +\n\t\t\t\"margin:auto;border:1px;padding:1px;\" +\n\t\t\t\"width:60%;top:1%\";\n\t\tdocumentElement.appendChild( container ).appendChild( div );\n\n\t\tvar divStyle = window.getComputedStyle( div );\n\t\tpixelPositionVal = divStyle.top !== \"1%\";\n\n\t\t// Support: Android 4.0 - 4.3 only, Firefox <=3 - 44\n\t\treliableMarginLeftVal = roundPixelMeasures( divStyle.marginLeft ) === 12;\n\n\t\t// Support: Android 4.0 - 4.3 only, Safari <=9.1 - 10.1, iOS <=7.0 - 9.3\n\t\t// Some styles come back with percentage values, even though they shouldn't\n\t\tdiv.style.right = \"60%\";\n\t\tpixelBoxStylesVal = roundPixelMeasures( divStyle.right ) === 36;\n\n\t\t// Support: IE 9 - 11 only\n\t\t// Detect misreporting of content dimensions for box-sizing:border-box elements\n\t\tboxSizingReliableVal = roundPixelMeasures( divStyle.width ) === 36;\n\n\t\t// Support: IE 9 only\n\t\t// Detect overflow:scroll screwiness (gh-3699)\n\t\t// Support: Chrome <=64\n\t\t// Don't get tricked when zoom affects offsetWidth 
(gh-4029)\n\t\tdiv.style.position = \"absolute\";\n\t\tscrollboxSizeVal = roundPixelMeasures( div.offsetWidth / 3 ) === 12;\n\n\t\tdocumentElement.removeChild( container );\n\n\t\t// Nullify the div so it wouldn't be stored in the memory and\n\t\t// it will also be a sign that checks already performed\n\t\tdiv = null;\n\t}", "docstring": "// Executing both pixelPosition & boxSizingReliable tests require only one layout", "url": "https://github.com/DamianEdwards/RazorSlices/blob/cf2e413551e49c3a3b40623ce020d678697f123a/samples/RazorSlices.Samples.WebApp/wwwroot/lib/jquery/dist/jquery.slim.js#L6522-L6564", "sha": "cf2e413551e49c3a3b40623ce020d678697f123a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "escapeMarkdownV2", "code": "function escapeMarkdownV2(text) {\n const ESCAPE_CHARACTERS = /[_*[\\]()~>#\\+\\-=|{}.!]/g;\n return text.replace(ESCAPE_CHARACTERS, '\\\\$&');\n}", "docstring": "// Telegraf for Telegram bot integration", "url": "https://github.com/josephrocca/OpenCharacters/blob/bfc1acbc68c27434e24b6837678867cc439dbfba/plugins/telegram/server.js#L44-L47", "sha": "bfc1acbc68c27434e24b6837678867cc439dbfba"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ɵɵresolveDocument", "code": "function ɵɵresolveDocument(element) {\n return { name: 'document', target: element.ownerDocument };\n}", "docstring": "/**\n *\n * @codeGenApi\n */", "url": "https://github.com/Totodore/socketioxide/blob/a480430ca3f611f9a550268e23d4ee455202e642/examples/angular-todomvc/dist/vendor.js#L24692-L24694", "sha": "a480430ca3f611f9a550268e23d4ee455202e642"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DomEventsPlugin.supports", "code": "supports(eventName) {\n return true;\n }", "docstring": "// This plugin should come last in the list of plugins, because it accepts all", "url": "https://github.com/Totodore/socketioxide/blob/a480430ca3f611f9a550268e23d4ee455202e642/examples/angular-todomvc/dist/vendor.js#L52229-L52231", "sha": "a480430ca3f611f9a550268e23d4ee455202e642"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "formatCurrency", "code": "function formatCurrency(value, locale, currency, currencyCode, digitsInfo) {\n const format = getLocaleNumberFormat(locale, NumberFormatStyle.Currency);\n const pattern = parseNumberFormat(format, getLocaleNumberSymbol(locale, NumberSymbol.MinusSign));\n pattern.minFrac = getNumberOfCurrencyDigits(currencyCode);\n pattern.maxFrac = pattern.minFrac;\n const res = formatNumberToLocaleString(value, pattern, locale, NumberSymbol.CurrencyGroup, NumberSymbol.CurrencyDecimal, digitsInfo);\n return res\n .replace(CURRENCY_CHAR, currency)\n // if we have 2 times the currency character, the second one is ignored\n .replace(CURRENCY_CHAR, '')\n // If there is a spacing between currency character and the value and\n // the currency character is suppressed by passing an empty string, the\n // spacing character would remain as part of the string. Then we\n // should remove it.\n .trim();\n}", "docstring": "/**\n * @ngModule CommonModule\n * @description\n *\n * Formats a number as currency using locale rules.\n *\n * @param value The number to format.\n * @param locale A locale code for the locale format rules to use.\n * @param currency A string containing the currency symbol or its name,\n * such as \"$\" or \"Canadian Dollar\". 
Used in output string, but does not affect the operation\n * of the function.\n * @param currencyCode The [ISO 4217](https://en.wikipedia.org/wiki/ISO_4217)\n * currency code, such as `USD` for the US dollar and `EUR` for the euro.\n * Used to determine the number of digits in the decimal part.\n * @param digitInfo Decimal representation options, specified by a string in the following format:\n * `{minIntegerDigits}.{minFractionDigits}-{maxFractionDigits}`. See `DecimalPipe` for more details.\n *\n * @returns The formatted currency value.\n *\n * @see `formatNumber()`\n * @see `DecimalPipe`\n * @see [Internationalization (i18n) Guide](https://angular.io/guide/i18n)\n *\n * @publicApi\n */", "url": "https://github.com/Totodore/socketioxide/blob/a480430ca3f611f9a550268e23d4ee455202e642/examples/angular-todomvc/dist/vendor.js#L57577-L57592", "sha": "a480430ca3f611f9a550268e23d4ee455202e642"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "arrayIndexOfSorted", "code": "function arrayIndexOfSorted(array, value) {\n return _arrayIndexOfSorted(array, value, 0);\n}", "docstring": "/**\n * Get an index of an `value` in a sorted `array`.\n *\n * NOTE:\n * - This uses binary search algorithm for fast removals.\n *\n * @param array A sorted array to binary search.\n * @param value The value to look for.\n * @returns index of the value.\n * - positive index if value found.\n * - negative index if value not found. (`~index` to get the value where it should have been\n * located)\n */", "url": "https://github.com/Totodore/socketioxide/blob/a480430ca3f611f9a550268e23d4ee455202e642/examples/basic-crud-application/dist/vendor.js#L22443-L22445", "sha": "a480430ca3f611f9a550268e23d4ee455202e642"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "createRootComponent", "code": "function createRootComponent(componentView, componentDef, rootLView, rootContext, hostFeatures) {\n const tView = rootLView[TVIEW];\n // Create directive instance with factory() and store at next index in viewData\n const component = instantiateRootComponent(tView, rootLView, componentDef);\n rootContext.components.push(component);\n componentView[CONTEXT] = component;\n hostFeatures && hostFeatures.forEach((feature) => feature(component, componentDef));\n // We want to generate an empty QueryList for root content queries for backwards\n // compatibility with ViewEngine.\n if (componentDef.contentQueries) {\n const tNode = getCurrentTNode();\n ngDevMode && assertDefined(tNode, 'TNode expected');\n componentDef.contentQueries(1 /* Create */, component, tNode.directiveStart);\n }\n const rootTNode = getCurrentTNode();\n ngDevMode && assertDefined(rootTNode, 'tNode should have been already created');\n if (tView.firstCreatePass &&\n (componentDef.hostBindings !== null || componentDef.hostAttrs !== null)) {\n setSelectedIndex(rootTNode.index);\n const rootTView = rootLView[TVIEW];\n registerHostBindingOpCodes(rootTView, rootTNode, rootLView, rootTNode.directiveStart, rootTNode.directiveEnd, componentDef);\n invokeHostBindingsInCreationMode(componentDef, component);\n }\n return component;\n}", "docstring": "/**\n * Creates a root component and sets it up with features and host bindings. 
Shared by\n * renderComponent() and ViewContainerRef.createComponent().\n */", "url": "https://github.com/Totodore/socketioxide/blob/a480430ca3f611f9a550268e23d4ee455202e642/examples/basic-crud-application/dist/vendor.js#L30373-L30397", "sha": "a480430ca3f611f9a550268e23d4ee455202e642"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Plugin", "code": "function Plugin(option, _relatedTarget) {\n return this.each(function () {\n var $this = $(this)\n var data = $this.data('bs.modal')\n var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option)\n\n if (!data) $this.data('bs.modal', (data = new Modal(this, options)))\n if (typeof option == 'string') data[option](_relatedTarget)\n else if (options.show) data.show(_relatedTarget)\n })\n }", "docstring": "// MODAL PLUGIN DEFINITION", "url": "https://github.com/souying/serverMmon/blob/d4ab92bb36dc1e273810ba5f1e33aa209bd81335/admin/js/bootstrap.js#L1208-L1218", "sha": "d4ab92bb36dc1e273810ba5f1e33aa209bd81335"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Express", "code": "function Express(options) {\n if (options === void 0) { options = {}; }\n /**\n * @inheritDoc\n */\n this.name = Express.id;\n this._router = options.router || options.app;\n this._methods = (Array.isArray(options.methods) ? options.methods : []).concat('use');\n }", "docstring": "/**\n * @inheritDoc\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/@sentry/tracing/esm/integrations/express.js#L12-L20", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "log", "code": "function log (message, site) {\n var haslisteners = eehaslisteners(process, 'deprecation')\n\n // abort early if no destination\n if (!haslisteners && this._ignored) {\n return\n }\n\n var caller\n var callFile\n var callSite\n var depSite\n var i = 0\n var seen = false\n var stack = getStack()\n var file = this._file\n\n if (site) {\n // provided site\n depSite = site\n callSite = callSiteLocation(stack[1])\n callSite.name = depSite.name\n file = callSite[0]\n } else {\n // get call site\n i = 2\n depSite = callSiteLocation(stack[i])\n callSite = depSite\n }\n\n // get caller of deprecated thing in relation to file\n for (; i < stack.length; i++) {\n caller = callSiteLocation(stack[i])\n callFile = caller[0]\n\n if (callFile === file) {\n seen = true\n } else if (callFile === this._file) {\n file = this._file\n } else if (seen) {\n break\n }\n }\n\n var key = caller\n ? depSite.join(':') + '__' + caller.join(':')\n : undefined\n\n if (key !== undefined && key in this._warned) {\n // already warned\n return\n }\n\n this._warned[key] = true\n\n // generate automatic message from call site\n var msg = message\n if (!msg) {\n msg = callSite === depSite || !callSite.name\n ? defaultMessage(depSite)\n : defaultMessage(callSite)\n }\n\n // emit deprecation if listeners exist\n if (haslisteners) {\n var err = DeprecationError(this._namespace, msg, stack.slice(i))\n process.emit('deprecation', err)\n return\n }\n\n // format and write message\n var format = process.stderr.isTTY\n ? 
formatColor\n : formatPlain\n var output = format.call(this, msg, caller, stack.slice(i))\n process.stderr.write(output + '\\n', 'utf8')\n}", "docstring": "/**\n * Display deprecation message.\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/depd/index.js#L185-L261", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Wordlist.split", "code": "split(mnemonic) {\n return mnemonic.toLowerCase().split(/ +/g);\n }", "docstring": "// Subclasses may override this", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/ethers/dist/ethers.esm.js#L15522-L15524", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "dotindex", "code": "function dotindex(value) {\n var match = lastDotRe.exec(value)\n\n return match ? match.index + 1 : value.length\n}", "docstring": "// Get the position of the last dot in `value`.", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/markdown-table/index.js#L246-L250", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "copyFromBufferString", "code": "function copyFromBufferString(n, list) {\n var p = list.head;\n var c = 1;\n var ret = p.data;\n n -= ret.length;\n while (p = p.next) {\n var str = p.data;\n var nb = n > str.length ? str.length : n;\n if (nb === str.length) ret += str;else ret += str.slice(0, n);\n n -= nb;\n if (n === 0) {\n if (nb === str.length) {\n ++c;\n if (p.next) list.head = p.next;else list.head = list.tail = null;\n } else {\n list.head = p;\n p.data = str.slice(nb);\n }\n break;\n }\n ++c;\n }\n list.length -= c;\n return ret;\n }", "docstring": "// Copies a specified amount of characters from the list of buffered data", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/mocha/mocha.js#L4585-L4609", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sender.doPong", "code": "doPong(data, mask, readOnly, cb) {\n this.sendFrame(\n Sender.frame(data, {\n fin: true,\n rsv1: false,\n opcode: 0x0a,\n mask,\n readOnly\n }),\n cb\n );\n }", "docstring": "/**\n * Frames and sends a pong message.\n *\n * @param {Buffer} data The message to send\n * @param {Boolean} [mask=false] Specifies whether or not to mask `data`\n * @param {Boolean} [readOnly=false] Specifies whether `data` can be modified\n * @param {Function} [cb] Callback\n * @private\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/ws/lib/sender.js#L231-L242", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "socketOnError", "code": "function socketOnError() {\n this.destroy();\n}", "docstring": "/**\n * Handle premature socket errors.\n *\n * @private\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/ws/lib/websocket-server.js#L371-L373", "sha": 
"8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Progress", "code": "function Progress(runner, options) {\n Base.call(this, runner, options);\n\n var self = this;\n var width = (Base.window.width * 0.5) | 0;\n var total = runner.total;\n var complete = 0;\n var lastN = -1;\n\n // default chars\n options = options || {};\n var reporterOptions = options.reporterOptions || {};\n\n options.open = reporterOptions.open || '[';\n options.complete = reporterOptions.complete || '▬';\n options.incomplete = reporterOptions.incomplete || Base.symbols.dot;\n options.close = reporterOptions.close || ']';\n options.verbose = reporterOptions.verbose || false;\n\n // tests started\n runner.on(EVENT_RUN_BEGIN, function() {\n process.stdout.write('\\n');\n cursor.hide();\n });\n\n // tests complete\n runner.on(EVENT_TEST_END, function() {\n complete++;\n\n var percent = complete / total;\n var n = (width * percent) | 0;\n var i = width - n;\n\n if (n === lastN && !options.verbose) {\n // Don't re-render the line if it hasn't changed\n return;\n }\n lastN = n;\n\n cursor.CR();\n process.stdout.write('\\u001b[J');\n process.stdout.write(color('progress', ' ' + options.open));\n process.stdout.write(Array(n).join(options.complete));\n process.stdout.write(Array(i).join(options.incomplete));\n process.stdout.write(color('progress', options.close));\n if (options.verbose) {\n process.stdout.write(color('progress', ' ' + complete + ' of ' + total));\n }\n });\n\n // tests are complete, output some stats\n // and the failures if any\n runner.once(EVENT_RUN_END, function() {\n cursor.show();\n process.stdout.write('\\n');\n self.epilogue();\n });\n}", "docstring": "/**\n * Constructs a new `Progress` reporter instance.\n *\n * @public\n * @class\n * @memberof Mocha.reporters\n * @extends Mocha.reporters.Base\n * @param {Runner} runner - Instance triggers reporter actions.\n * @param {Object} [options] - runner options\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/eth-gas-reporter/node_modules/mocha/lib/reporters/progress.js#L40-L97", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getAllKeys", "code": "function getAllKeys(object) {\n return baseGetAllKeys(object, keys, getSymbols);\n }", "docstring": "/**\n * Creates an array of own enumerable property names and symbols of `object`.\n *\n * @private\n * @param {Object} object The object to query.\n * @returns {Array} Returns the array of property names and symbols.\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/lodash/lodash.js#L5915-L5917", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "memoizeCapped", "code": "function memoizeCapped(func) {\n var result = memoize(func, function(key) {\n if (cache.size === MAX_MEMOIZE_SIZE) {\n cache.clear();\n }\n return key;\n });\n\n var cache = result.cache;\n return result;\n }", "docstring": "/**\n * A specialized version of `_.memoize` which clears the memoized function's\n * cache when it exceeds `MAX_MEMOIZE_SIZE`.\n *\n * @private\n * @param {Function} func The function to have its output memoized.\n * @returns {Function} Returns the new memoized function.\n */", "url": 
"https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/lodash/lodash.js#L6488-L6498", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "baseLt", "code": "function baseLt(value, other) {\n return value < other;\n}", "docstring": "/**\n * The base implementation of `_.lt` which doesn't coerce arguments.\n *\n * @private\n * @param {*} value The value to compare.\n * @param {*} other The other value to compare.\n * @returns {boolean} Returns `true` if `value` is less than `other`,\n * else `false`.\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/lodash/_baseLt.js#L10-L12", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "httpFetch", "code": "async function httpFetch (fetchParams) {\n // 1. Let request be fetchParams’s request.\n const request = fetchParams.request\n\n // 2. Let response be null.\n let response = null\n\n // 3. Let actualResponse be null.\n let actualResponse = null\n\n // 4. Let timingInfo be fetchParams’s timing info.\n const timingInfo = fetchParams.timingInfo\n\n // 5. If request’s service-workers mode is \"all\", then:\n if (request.serviceWorkers === 'all') {\n // TODO\n }\n\n // 6. If response is null, then:\n if (response === null) {\n // 1. If makeCORSPreflight is true and one of these conditions is true:\n // TODO\n\n // 2. If request’s redirect mode is \"follow\", then set request’s\n // service-workers mode to \"none\".\n if (request.redirect === 'follow') {\n request.serviceWorkers = 'none'\n }\n\n // 3. Set response and actualResponse to the result of running\n // HTTP-network-or-cache fetch given fetchParams.\n actualResponse = response = await httpNetworkOrCacheFetch(fetchParams)\n\n // 4. If request’s response tainting is \"cors\" and a CORS check\n // for request and response returns failure, then return a network error.\n if (\n request.responseTainting === 'cors' &&\n corsCheck(request, response) === 'failure'\n ) {\n return makeNetworkError('cors failure')\n }\n\n // 5. If the TAO check for request and response returns failure, then set\n // request’s timing allow failed flag.\n if (TAOCheck(request, response) === 'failure') {\n request.timingAllowFailed = true\n }\n }\n\n // 7. If either request’s response tainting or response’s type\n // is \"opaque\", and the cross-origin resource policy check with\n // request’s origin, request’s client, request’s destination,\n // and actualResponse returns blocked, then return a network error.\n if (\n (request.responseTainting === 'opaque' || response.type === 'opaque') &&\n crossOriginResourcePolicyCheck(\n request.origin,\n request.client,\n request.destination,\n actualResponse\n ) === 'blocked'\n ) {\n return makeNetworkError('blocked')\n }\n\n // 8. If actualResponse’s status is a redirect status, then:\n if (redirectStatus.includes(actualResponse.status)) {\n // 1. If actualResponse’s status is not 303, request’s body is not null,\n // and the connection uses HTTP/2, then user agents may, and are even\n // encouraged to, transmit an RST_STREAM frame.\n // See, https://github.com/whatwg/fetch/issues/1288\n if (request.redirect !== 'manual') {\n fetchParams.controller.connection.destroy()\n }\n\n // 2. 
Switch on request’s redirect mode:\n if (request.redirect === 'error') {\n // Set response to a network error.\n response = makeNetworkError('unexpected redirect')\n } else if (request.redirect === 'manual') {\n // Set response to an opaque-redirect filtered response whose internal\n // response is actualResponse.\n // NOTE(spec): On the web this would return an `opaqueredirect` response,\n // but that doesn't make sense server side.\n // See https://github.com/nodejs/undici/issues/1193.\n response = actualResponse\n } else if (request.redirect === 'follow') {\n // Set response to the result of running HTTP-redirect fetch given\n // fetchParams and response.\n response = await httpRedirectFetch(fetchParams, response)\n } else {\n assert(false)\n }\n }\n\n // 9. Set response’s timing info to timingInfo.\n response.timingInfo = timingInfo\n\n // 10. Return response.\n return response\n}", "docstring": "// https://fetch.spec.whatwg.org/#http-fetch", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/undici/lib/fetch/index.js#L999-L1099", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Miner.mine", "code": "async mine(iterations = 0) {\n const solution = await this.iterate(iterations);\n if (solution) {\n if (this.block) {\n const data = this.block.toJSON();\n data.header.mixHash = solution.mixHash;\n data.header.nonce = solution.nonce;\n return ethereumjs_block_1.Block.fromBlockData(data, { common: this.block._common });\n }\n else {\n const data = this.blockHeader.toJSON();\n data.mixHash = solution.mixHash;\n data.nonce = solution.nonce;\n return ethereumjs_block_1.BlockHeader.fromHeaderData(data, { common: this.blockHeader._common });\n }\n }\n }", "docstring": "/**\n * Iterate `iterations` time over nonces, returns a `BlockHeader` or `Block` if a solution is found, `undefined` otherwise\n * @param iterations - Number of iterations to iterate over. 
If `-1` is passed, the loop runs until a solution is found\n * @returns - `undefined` if no solution was found within the iterations, or a `BlockHeader` or `Block`\n * with valid PoW based upon what was passed in the constructor\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/@nomicfoundation/ethereumjs-ethash/dist/index.js#L50-L66", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "trap", "code": "function trap(err) {\n // TODO: facilitate extra data along with errors\n throw new exceptions_1.EvmError(err);\n}", "docstring": "/**\n * Wraps error message as EvmError\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/@nomicfoundation/ethereumjs-evm/dist/opcodes/util.js#L27-L30", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DefaultStateManager.getProof", "code": "async getProof(address, storageSlots = []) {\n const account = await this.getAccount(address);\n const accountProof = (await this._trie.createProof(address.buf)).map((p) => (0, ethereumjs_util_1.bufferToHex)(p));\n const storageProof = [];\n const storageTrie = await this._getStorageTrie(address);\n for (const storageKey of storageSlots) {\n const proof = (await storageTrie.createProof(storageKey)).map((p) => (0, ethereumjs_util_1.bufferToHex)(p));\n let value = (0, ethereumjs_util_1.bufferToHex)(await this.getContractStorage(address, storageKey));\n if (value === '0x') {\n value = '0x0';\n }\n const proofItem = {\n key: (0, ethereumjs_util_1.bufferToHex)(storageKey),\n value,\n proof,\n };\n storageProof.push(proofItem);\n }\n const returnValue = {\n address: address.toString(),\n balance: (0, ethereumjs_util_1.bigIntToHex)(account.balance),\n codeHash: (0, ethereumjs_util_1.bufferToHex)(account.codeHash),\n nonce: (0, ethereumjs_util_1.bigIntToHex)(account.nonce),\n storageHash: (0, ethereumjs_util_1.bufferToHex)(account.storageRoot),\n accountProof,\n storageProof,\n };\n return returnValue;\n }", "docstring": "/**\n * Get an EIP-1186 proof\n * @param address address to get proof of\n * @param storageSlots storage slots to get proof of\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/@nomicfoundation/ethereumjs-statemanager/dist/stateManager.js#L250-L278", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "generatorEqual", "code": "function generatorEqual(leftHandOperand, rightHandOperand, options) {\n return iterableEqual(getGeneratorEntries(leftHandOperand), getGeneratorEntries(rightHandOperand), options);\n}", "docstring": "/*!\n * Simple equality for generator objects such as those returned by generator functions.\n *\n * @param {Iterable} leftHandOperand\n * @param {Iterable} rightHandOperand\n * @param {Object} [options] (Optional)\n * @return {Boolean} result\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/deep-eql/index.js#L330-L332", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "errorDiff", "code": 
"function errorDiff(actual, expected) {\n return diff\n .diffWordsWithSpace(actual, expected)\n .map(function(str) {\n if (str.added) {\n return colorLines('diff added', str.value);\n }\n if (str.removed) {\n return colorLines('diff removed', str.value);\n }\n return str.value;\n })\n .join('');\n}", "docstring": "/**\n * Returns character diff for `err`.\n *\n * @private\n * @param {String} actual\n * @param {String} expected\n * @return {string} the diff\n */", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/solidity-coverage/node_modules/mocha/mocha.js#L2836-L2849", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "_interopRequireDefault", "code": "function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }", "docstring": "/*istanbul ignore start*/", "url": "https://github.com/rdubois-crypto/FreshCryptoLib/blob/8179e08cac72072bd260796633fec41fdfd5b441/solidity/tests/hardhat/node_modules/solidity-coverage/node_modules/mocha/mocha.js#L11230-L11230", "sha": "8179e08cac72072bd260796633fec41fdfd5b441"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TorusGeometry", "code": "function TorusGeometry( radius, tube, radialSegments, tubularSegments, arc ) {\n\n\t\tGeometry.call( this );\n\n\t\tthis.type = 'TorusGeometry';\n\n\t\tthis.parameters = {\n\t\t\tradius: radius,\n\t\t\ttube: tube,\n\t\t\tradialSegments: radialSegments,\n\t\t\ttubularSegments: tubularSegments,\n\t\t\tarc: arc\n\t\t};\n\n\t\tthis.fromBufferGeometry( new TorusBufferGeometry( radius, tube, radialSegments, tubularSegments, arc ) );\n\t\tthis.mergeVertices();\n\n\t}", "docstring": "/**\n\t * @author oosmoxiecode\n\t * @author mrdoob / http://mrdoob.com/\n\t * @author Mugen87 / https://github.com/Mugen87\n\t */", "url": "https://github.com/ashawkey/torch-merf/blob/a669be605349c3af5167832f8ead6f69bbf8e697/renderer/third_party/three.js#L29523-L29540", "sha": "a669be605349c3af5167832f8ead6f69bbf8e697"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TokenTreeEmitter.addSublanguage", "code": "addSublanguage(emitter, name) {\n /** @type DataNode */\n const node = emitter.root;\n node.kind = name;\n node.sublanguage = true;\n this.add(node);\n }", "docstring": "/**\n * @param {Emitter & {root: DataNode}} emitter\n * @param {string} name\n */", "url": "https://github.com/nineya/halo-theme-dream2.0/blob/3fdaf6a59b976a36962e905cc230bdbf2a04708f/templates/assets/lib/highlightjs@11.5.1/es/highlight.js#L346-L352", "sha": "3fdaf6a59b976a36962e905cc230bdbf2a04708f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "dateTimeDeserializer", "code": "function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }", "docstring": "// get the result from the body", "url": "https://github.com/aidar-freeed/ai-codereviewer/blob/a9a064dfa1db8c83f40ef63f6e247fa09c935ed6/dist/index.js#L1877-L1885", "sha": "a9a064dfa1db8c83f40ef63f6e247fa09c935ed6"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AbortController.signal", "code": "get signal() {\n return getSignal(this);\n }", "docstring": "/**\n * Returns the `AbortSignal` object associated with this object.\n */", "url": 
"https://github.com/aidar-freeed/ai-codereviewer/blob/a9a064dfa1db8c83f40ef63f6e247fa09c935ed6/dist/index.js#L4339-L4341", "sha": "a9a064dfa1db8c83f40ef63f6e247fa09c935ed6"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ReadableStreamDefaultController.desiredSize", "code": "get desiredSize() {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('desiredSize');\n }\n return ReadableStreamDefaultControllerGetDesiredSize(this);\n }", "docstring": "/**\n * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is\n * over-full. An underlying source ought to use this information to determine when and how to apply backpressure.\n */", "url": "https://github.com/aidar-freeed/ai-codereviewer/blob/a9a064dfa1db8c83f40ef63f6e247fa09c935ed6/dist/index.js#L12319-L12324", "sha": "a9a064dfa1db8c83f40ef63f6e247fa09c935ed6"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Ellipse$1", "code": "function Ellipse$1(rx,ry,ax){if(!(this instanceof Ellipse$1)){return new Ellipse$1(rx,ry,ax);}this.rx=rx;this.ry=ry;this.ax=ax;}// Apply a linear transform m to the ellipse", "docstring": "// Class constructor :", "url": "https://github.com/GeorgLegato/stable-diffusion-webui-vectorstudio/blob/03535f64caef1de151122574cf4a39943db6e1eb/scripts/editor/Editor.js#L19848-L19848", "sha": "03535f64caef1de151122574cf4a39943db6e1eb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MainMenu.savePreferences", "code": "async savePreferences(e) {\n const {\n lang,\n bgcolor,\n bgurl,\n gridsnappingon,\n gridsnappingstep,\n gridcolor,\n showrulers,\n baseunit\n } = e.detail;\n // Set background\n this.editor.setBackground(bgcolor, bgurl);\n\n // set language\n if (lang && lang !== this.editor.configObj.pref('lang')) {\n this.editor.configObj.pref('lang', lang);\n seAlert('Changing the language needs reload');\n }\n\n // set grid setting\n this.editor.configObj.curConfig.gridSnapping = gridsnappingon;\n this.editor.configObj.curConfig.snappingStep = gridsnappingstep;\n this.editor.configObj.curConfig.gridColor = gridcolor;\n this.editor.configObj.curConfig.showRulers = showrulers;\n if (this.editor.configObj.curConfig.showRulers) {\n this.editor.rulers.updateRulers();\n }\n this.editor.configObj.curConfig.baseUnit = baseunit;\n this.editor.svgCanvas.setConfig(this.editor.configObj.curConfig);\n this.editor.updateCanvas();\n this.hidePreferences();\n }", "docstring": "/**\n * Save user preferences based on current values in the UI.\n * @param {Event} e\n * @function module:SVGthis.savePreferences\n * @returns {Promise}\n */", "url": "https://github.com/GeorgLegato/stable-diffusion-webui-vectorstudio/blob/03535f64caef1de151122574cf4a39943db6e1eb/scripts/editor/iife-Editor.js#L33348-L33380", "sha": "03535f64caef1de151122574cf4a39943db6e1eb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "keyDown", "code": "function keyDown(e) {\n if (e.target.value === '' && e.target !== hex && (bindedHex && e.target !== bindedHex || !bindedHex)) return undefined;\n if (!validateKey(e)) return e;\n switch (e.target) {\n case red:\n switch (e.keyCode) {\n case 38:\n red.value = setValueInRange.call(that, (red.value << 0) + 1, 0, 255);\n color.val('r', red.value, e.target);\n return false;\n case 40:\n red.value = setValueInRange.call(that, (red.value << 0) - 1, 0, 255);\n color.val('r', red.value, e.target);\n return 
false;\n }\n break;\n case green:\n switch (e.keyCode) {\n case 38:\n green.value = setValueInRange.call(that, (green.value << 0) + 1, 0, 255);\n color.val('g', green.value, e.target);\n return false;\n case 40:\n green.value = setValueInRange.call(that, (green.value << 0) - 1, 0, 255);\n color.val('g', green.value, e.target);\n return false;\n }\n break;\n case blue:\n switch (e.keyCode) {\n case 38:\n blue.value = setValueInRange.call(that, (blue.value << 0) + 1, 0, 255);\n color.val('b', blue.value, e.target);\n return false;\n case 40:\n blue.value = setValueInRange.call(that, (blue.value << 0) - 1, 0, 255);\n color.val('b', blue.value, e.target);\n return false;\n }\n break;\n case alpha:\n switch (e.keyCode) {\n case 38:\n alpha.value = setValueInRange.call(that, Number.parseFloat(alpha.value) + 1, 0, 100);\n color.val('a', toFixedNumeric$1(alpha.value * 255 / 100, alphaPrecision), e.target);\n return false;\n case 40:\n alpha.value = setValueInRange.call(that, Number.parseFloat(alpha.value) - 1, 0, 100);\n color.val('a', toFixedNumeric$1(alpha.value * 255 / 100, alphaPrecision), e.target);\n return false;\n }\n break;\n case hue:\n switch (e.keyCode) {\n case 38:\n hue.value = setValueInRange.call(that, (hue.value << 0) + 1, 0, 360);\n color.val('h', hue.value, e.target);\n return false;\n case 40:\n hue.value = setValueInRange.call(that, (hue.value << 0) - 1, 0, 360);\n color.val('h', hue.value, e.target);\n return false;\n }\n break;\n case saturation:\n switch (e.keyCode) {\n case 38:\n saturation.value = setValueInRange.call(that, (saturation.value << 0) + 1, 0, 100);\n color.val('s', saturation.value, e.target);\n return false;\n case 40:\n saturation.value = setValueInRange.call(that, (saturation.value << 0) - 1, 0, 100);\n color.val('s', saturation.value, e.target);\n return false;\n }\n break;\n case value:\n switch (e.keyCode) {\n case 38:\n value.value = setValueInRange.call(that, (value.value << 0) + 1, 0, 100);\n color.val('v', value.value, e.target);\n return false;\n case 40:\n value.value = setValueInRange.call(that, (value.value << 0) - 1, 0, 100);\n color.val('v', value.value, e.target);\n return false;\n }\n break;\n }\n return undefined;\n }", "docstring": "// input box key down - use arrows to alter color", "url": "https://github.com/GeorgLegato/stable-diffusion-webui-vectorstudio/blob/03535f64caef1de151122574cf4a39943db6e1eb/scripts/editor/xdomain-Editor.js#L22339-L22429", "sha": "03535f64caef1de151122574cf4a39943db6e1eb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "uncurryThis", "code": "function uncurryThis( fn ) {\n return function() {\n return call.apply( fn, arguments );\n };\n }", "docstring": "// http://jsperf.com/uncurrythis", "url": "https://github.com/hiouttime/dujiaoka/blob/51538b6a7e74a67a4f92a242b1e3c6799b64f5ca/public/vendor/dcat-admin/dcat/plugins/webuploader/webuploader.nolog.js#L205-L209", "sha": "51538b6a7e74a67a4f92a242b1e3c6799b64f5ca"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getStreamingResponseStatusCodes", "code": "function getStreamingResponseStatusCodes(operationSpec) {\n const result = new Set();\n for (const statusCode in operationSpec.responses) {\n const operationResponse = operationSpec.responses[statusCode];\n if (operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Stream) {\n result.add(Number(statusCode));\n }\n }\n return result;\n}", "docstring": "/**\n * Gets the list of status codes for 
streaming responses.\n * @internal\n */", "url": "https://github.com/GitHubSecurityLab/actions-permissions/blob/babd69bc8d78e6cdece903dfdcfb72d4e1a4f00d/monitor/dist/index.js#L105800-L105810", "sha": "babd69bc8d78e6cdece903dfdcfb72d4e1a4f00d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "array_default", "code": "function array_default(a4, b) {\n return (isNumberArray(b) ? numberArray_default : genericArray)(a4, b);\n}", "docstring": "// node_modules/d3-interpolate/src/array.js", "url": "https://github.com/juba/pyobsplot/blob/4ae57b857091989d7f2911e052b9f3ad636e6fea/src/pyobsplot/static/static-widget.js#L4558-L4560", "sha": "4ae57b857091989d7f2911e052b9f3ad636e6fea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "conicProjection", "code": "function conicProjection(projectAt) {\n var phi02 = 0, phi12 = pi4 / 3, m3 = projectionMutator(projectAt), p = m3(phi02, phi12);\n p.parallels = function(_) {\n return arguments.length ? m3(phi02 = _[0] * radians2, phi12 = _[1] * radians2) : [phi02 * degrees3, phi12 * degrees3];\n };\n return p;\n}", "docstring": "// node_modules/d3-geo/src/projection/conic.js", "url": "https://github.com/juba/pyobsplot/blob/4ae57b857091989d7f2911e052b9f3ad636e6fea/src/pyobsplot/static/static-widget.js#L12209-L12215", "sha": "4ae57b857091989d7f2911e052b9f3ad636e6fea"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WebMidi", "code": "function WebMidi() {\n\n // Singleton. Prevent instantiation through WebMidi.__proto__.constructor()\n if (WebMidi.prototype._singleton) {\n throw new Error(\"WebMidi is a singleton, it cannot be instantiated directly.\");\n }\n WebMidi.prototype._singleton = this;\n\n // MIDI inputs and outputs\n this._inputs = [];\n this._outputs = [];\n\n // Object to hold all user-defined handlers for interface-wide events (connected, disconnected,\n // etc.)\n this._userHandlers = {};\n\n // Array of statechange events to process. These events must be parsed synchronously so they do\n // not override each other.\n this._stateChangeQueue = [];\n\n // Indicates whether we are currently processing a statechange event (in which case new events\n // are to be queued).\n this._processingStateChange = false;\n\n // Events triggered at the interface level (WebMidi)\n this._midiInterfaceEvents = [\"connected\", \"disconnected\"];\n\n // the current nrpns being constructed, by channel\n this._nrpnBuffer = [[],[],[],[], [],[],[],[], [],[],[],[], [],[],[],[]];\n\n // Enable/Disable NRPN event dispatch\n this._nrpnEventsEnabled = true;\n\n // NRPN message types\n this._nrpnTypes = [\"entry\", \"increment\", \"decrement\"];\n\n // Notes and semitones for note guessing\n this._notes = [\"C\", \"C#\", \"D\", \"D#\", \"E\", \"F\", \"F#\", \"G\", \"G#\", \"A\", \"A#\", \"B\"];\n this._semitones = {C: 0, D: 2, E: 4, F: 5, G: 7, A: 9, B: 11 };\n\n // Define some \"static\" properties\n Object.defineProperties(this, {\n\n /**\n * [read-only] List of valid MIDI system messages and matching hexadecimal values.\n *\n * Note: values 249 and 253 are actually dispatched by the Web MIDI API but the MIDI 1.0 does\n * not say what they are used for. 
About those values, it only states: undefined (reserved)\n *\n * @property MIDI_SYSTEM_MESSAGES\n * @type Object\n * @static\n *\n * @since 2.0.0\n */\n MIDI_SYSTEM_MESSAGES: {\n value: {\n\n // System common messages\n sysex: 0xF0, // 240\n timecode: 0xF1, // 241\n songposition: 0xF2, // 242\n songselect: 0xF3, // 243\n tuningrequest: 0xF6, // 246\n sysexend: 0xF7, // 247 (never actually received - simply ends a sysex)\n\n // System real-time messages\n clock: 0xF8, // 248\n start: 0xFA, // 250\n continue: 0xFB, // 251\n stop: 0xFC, // 252\n activesensing: 0xFE, // 254\n reset: 0xFF, // 255\n\n // Custom WebMidi.js messages\n midimessage: 0,\n unknownsystemmessage: -1\n },\n writable: false,\n enumerable: true,\n configurable: false\n },\n\n /**\n * [read-only] An object containing properties for each MIDI channel messages and their\n * associated hexadecimal value.\n *\n * @property MIDI_CHANNEL_MESSAGES\n * @type Object\n * @static\n *\n * @since 2.0.0\n */\n MIDI_CHANNEL_MESSAGES: {\n value: {\n noteoff: 0x8, // 8\n noteon: 0x9, // 9\n keyaftertouch: 0xA, // 10\n controlchange: 0xB, // 11\n channelmode: 0xB, // 11\n nrpn: 0xB, // 11\n programchange: 0xC, // 12\n channelaftertouch: 0xD, // 13\n pitchbend: 0xE // 14\n },\n writable: false,\n enumerable: true,\n configurable: false\n },\n\n /**\n * [read-only] An object containing properties for each registered parameters and their\n * associated pair of hexadecimal values. MIDI registered parameters extend the original list\n * of control change messages (a.k.a. CC messages). Currently, there are only a limited number\n * of them.\n *\n * @property MIDI_REGISTERED_PARAMETER\n * @type Object\n * @static\n *\n * @since 2.0.0\n */\n MIDI_REGISTERED_PARAMETER: {\n value: {\n pitchbendrange: [0x00, 0x00],\n channelfinetuning: [0x00, 0x01],\n channelcoarsetuning: [0x00, 0x02],\n tuningprogram: [0x00, 0x03],\n tuningbank: [0x00, 0x04],\n modulationrange: [0x00, 0x05],\n\n azimuthangle: [0x3D, 0x00],\n elevationangle: [0x3D, 0x01],\n gain: [0x3D, 0x02],\n distanceratio: [0x3D, 0x03],\n maximumdistance: [0x3D, 0x04],\n maximumdistancegain: [0x3D, 0x05],\n referencedistanceratio: [0x3D, 0x06],\n panspreadangle: [0x3D, 0x07],\n rollangle: [0x3D, 0x08]\n },\n writable: false,\n enumerable: true,\n configurable: false\n },\n\n /**\n * [read-only] An object containing properties for each MIDI control change messages (a.k.a.\n * CC messages) and their associated hexadecimal value.\n *\n * @property MIDI_CONTROL_CHANGE_MESSAGES\n * @type Object\n * @static\n *\n * @since 2.0.0\n */\n MIDI_CONTROL_CHANGE_MESSAGES: {\n value: {\n bankselectcoarse: 0,\n modulationwheelcoarse: 1,\n breathcontrollercoarse: 2,\n footcontrollercoarse: 4,\n portamentotimecoarse: 5,\n dataentrycoarse: 6,\n volumecoarse: 7,\n balancecoarse: 8,\n pancoarse: 10,\n expressioncoarse: 11,\n effectcontrol1coarse: 12,\n effectcontrol2coarse: 13,\n generalpurposeslider1: 16,\n generalpurposeslider2: 17,\n generalpurposeslider3: 18,\n generalpurposeslider4: 19,\n bankselectfine: 32,\n modulationwheelfine: 33,\n breathcontrollerfine: 34,\n footcontrollerfine: 36,\n portamentotimefine: 37,\n dataentryfine: 38,\n volumefine: 39,\n balancefine: 40,\n panfine: 42,\n expressionfine: 43,\n effectcontrol1fine: 44,\n effectcontrol2fine: 45,\n holdpedal: 64,\n portamento: 65,\n sustenutopedal: 66,\n softpedal: 67,\n legatopedal: 68,\n hold2pedal: 69,\n soundvariation: 70,\n resonance: 71,\n soundreleasetime: 72,\n soundattacktime: 73,\n brightness: 74,\n soundcontrol6: 75,\n soundcontrol7: 76,\n 
soundcontrol8: 77,\n soundcontrol9: 78,\n soundcontrol10: 79,\n generalpurposebutton1: 80,\n generalpurposebutton2: 81,\n generalpurposebutton3: 82,\n generalpurposebutton4: 83,\n reverblevel: 91,\n tremololevel: 92,\n choruslevel: 93,\n celestelevel: 94,\n phaserlevel: 95,\n databuttonincrement: 96,\n databuttondecrement: 97,\n nonregisteredparametercoarse: 98,\n nonregisteredparameterfine: 99,\n registeredparametercoarse: 100,\n registeredparameterfine: 101\n },\n writable: false,\n enumerable: true,\n configurable: false\n },\n\n /**\n * [read-only] An object containing properties for MIDI control change messages\n * that make up NRPN messages\n *\n * @property MIDI_NRPN_MESSAGES\n * @type Object\n * @static\n *\n * @since 2.0.0\n */\n MIDI_NRPN_MESSAGES: {\n value: {\n entrymsb: 6,\n entrylsb: 38,\n increment: 96,\n decrement: 97,\n paramlsb: 98,\n parammsb: 99,\n nullactiveparameter: 127\n },\n writable: false,\n enumerable: true,\n configurable: false\n },\n\n /**\n * [read-only] List of MIDI channel mode messages as defined in the official MIDI\n * specification.\n *\n * @property MIDI_CHANNEL_MODE_MESSAGES\n * @type Object\n * @static\n *\n * @since 2.0.0\n */\n MIDI_CHANNEL_MODE_MESSAGES: {\n value: {\n allsoundoff: 120,\n resetallcontrollers: 121,\n localcontrol: 122,\n allnotesoff: 123,\n omnimodeoff: 124,\n omnimodeon: 125,\n monomodeon: 126,\n polymodeon: 127\n },\n writable: false,\n enumerable: true,\n configurable: false\n },\n\n /**\n * An integer to offset the octave both in inbound and outbound messages. By default, middle C\n * (MIDI note number 60) is placed on the 4th octave (C4).\n *\n * If, for example, `octaveOffset` is set to 2, MIDI note number 60 will be reported as C6. If\n * `octaveOffset` is set to -1, MIDI note number 60 will be reported as C3.\n *\n * @property octaveOffset\n * @type Number\n * @static\n *\n * @since 2.1\n */\n octaveOffset: {\n value: 0,\n writable: true,\n enumerable: true,\n configurable: false\n }\n\n });\n\n // Define getters/setters\n Object.defineProperties(this, {\n\n /**\n * [read-only] Indicates whether the environment supports the Web MIDI API or not.\n *\n * Note: in environments that do not offer built-in MIDI support, this will report true if the\n * `navigator.requestMIDIAccess` function is available. 
For example, if you have installed\n * WebMIDIAPIShim but no plugin, this property will be true even though actual support might\n * not be there.\n *\n * @property supported\n * @type Boolean\n * @static\n */\n supported: {\n enumerable: true,\n get: function() {\n return \"requestMIDIAccess\" in navigator;\n }\n },\n\n /**\n * [read-only] Indicates whether the interface to the host\"s MIDI subsystem is currently\n * enabled.\n *\n * @property enabled\n * @type Boolean\n * @static\n */\n enabled: {\n enumerable: true,\n get: function() {\n return this.interface !== undefined;\n }.bind(this)\n },\n\n /**\n * [read-only] An array of all currently available MIDI input ports.\n *\n * @property inputs\n * @type {Array}\n * @static\n */\n inputs: {\n enumerable: true,\n get: function() {\n return this._inputs;\n }.bind(this)\n },\n\n /**\n * [read-only] An array of all currently available MIDI output ports.\n *\n * @property outputs\n * @type {Array}\n * @static\n */\n outputs: {\n enumerable: true,\n get: function() {\n return this._outputs;\n }.bind(this)\n },\n\n /**\n * [read-only] Indicates whether the interface to the host\"s MIDI subsystem is currently\n * active.\n *\n * @property sysexEnabled\n * @type Boolean\n * @static\n */\n sysexEnabled: {\n enumerable: true,\n get: function() {\n return !!(this.interface && this.interface.sysexEnabled);\n }.bind(this)\n },\n\n /**\n * [read-only] Indicates whether WebMidi should dispatch Non-Registered\n * Parameter Number events (which are generally groups of CC messages)\n * If correct sequences of CC messages are received, NRPN events will\n * fire. The first out of order NRPN CC will fall through the collector\n * logic and all CC messages buffered will be discarded as incomplete.\n *\n * @private\n *\n * @property nrpnEventsEnabled\n * @type Boolean\n * @static\n */\n nrpnEventsEnabled: {\n enumerable: true,\n get: function() {\n return !!(this._nrpnEventsEnabled);\n }.bind(this),\n set: function(enabled) {\n this._nrpnEventsEnabled = enabled;\n return this._nrpnEventsEnabled;\n }\n },\n\n /**\n * [read-only] NRPN message types\n *\n * @property nrpnTypes\n * @type Array\n * @static\n */\n nrpnTypes: {\n enumerable: true,\n get: function() {\n return this._nrpnTypes;\n }.bind(this)\n },\n\n /**\n * [read-only] Current MIDI performance time in milliseconds. This can be used to queue events\n * in the future.\n *\n * @property time\n * @type DOMHighResTimeStamp\n * @static\n */\n time: {\n enumerable: true,\n get: function() {\n return performance.now();\n }\n }\n\n });\n\n }", "docstring": "/**\n * The `WebMidi` object makes it easier to work with the Web MIDI API. Basically, it simplifies\n * two things: sending outgoing MIDI messages and reacting to incoming MIDI messages.\n *\n * Sending MIDI messages is done via an `Output` object. All available outputs can be accessed in\n * the `WebMidi.outputs` array. There is one `Output` object for each output port available on\n * your system. Similarly, reacting to MIDI messages as they are coming in is simply a matter of\n * adding a listener to an `Input` object. Similarly, all inputs can be found in the\n * `WebMidi.inputs` array.\n *\n * Please note that a single hardware device might create more than one input and/or output ports.\n *\n * #### Sending messages\n *\n * To send MIDI messages, you simply need to call the desired method (`playNote()`,\n * `sendPitchBend()`, `stopNote()`, etc.) from an `Output` object and pass in the appropriate\n * parameters. 
All the native MIDI communication will be handled for you. The only additional\n * thing that needs to be done is to first enable `WebMidi`. Here is an example:\n *\n * WebMidi.enable(function(err) {\n * if (err) console.log(\"An error occurred\", err);\n * WebMidi.outputs[0].playNote(\"C3\");\n * });\n *\n * The code above, calls the `WebMidi.enable()` method. Upon success, this method executes the\n * callback function specified as a parameter. In this case, the callback calls the `playnote()`\n * function to play a 3rd octave C on the first available output port.\n *\n * #### Receiving messages\n *\n * Receiving messages is just as easy. You simply have to set a callback function to be triggered\n * when a specific MIDI message is received. For example, here\"s how to listen for pitch bend\n * events on the first input port:\n *\n * WebMidi.enable(function(err) {\n * if (err) console.log(\"An error occurred\", err);\n *\n * WebMidi.inputs[0].addListener(\"pitchbend\", \"all\", function(e) {\n * console.log(\"Pitch value: \" + e.value);\n * });\n *\n * });\n *\n * As you can see, this library is much easier to use than the native Web MIDI API. No need to\n * manually craft or decode binary MIDI messages anymore!\n *\n * @class WebMidi\n * @static\n *\n * @throws Error WebMidi is a singleton, it cannot be instantiated directly.\n *\n * @todo Switch away from yuidoc (deprecated) to be able to serve doc over https\n * @todo Yuidoc does not allow multiple exceptions (@throws) for a single method ?!\n *\n */", "url": "https://github.com/wiwikuan/pianometer/blob/4122760cd710a873b12ab4d7740cf36c50df4a6a/webmidi.js#L60-L491", "sha": "4122760cd710a873b12ab4d7740cf36c50df4a6a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Router.addCacheListener", "code": "addCacheListener() {\n // See https://github.com/Microsoft/TypeScript/issues/28357#issuecomment-436484705\n self.addEventListener('message', event => {\n // event.data is type 'any'\n // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n if (event.data && event.data.type === 'CACHE_URLS') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment\n const {\n payload\n } = event.data;\n {\n logger.debug(`Caching URLs from the window`, payload.urlsToCache);\n }\n const requestPromises = Promise.all(payload.urlsToCache.map(entry => {\n if (typeof entry === 'string') {\n entry = [entry];\n }\n const request = new Request(...entry);\n return this.handleRequest({\n request,\n event\n });\n // TODO(philipwalton): TypeScript errors without this typecast for\n // some reason (probably a bug). 
The real type here should work but\n          // doesn't: `Array<Promise<Response> | undefined>`.\n        })); // TypeScript\n        event.waitUntil(requestPromises);\n        // If a MessageChannel was used, reply to the message on success.\n        if (event.ports && event.ports[0]) {\n          void requestPromises.then(() => event.ports[0].postMessage(true));\n        }\n      }\n    });\n  }", "docstring": "/**\n   * Adds a message event listener for URLs to cache from the window.\n   * This is useful to cache resources loaded on the page prior to when the\n   * service worker started controlling it.\n   *\n   * The format of the message data sent from the window should be as follows.\n   * Where the `urlsToCache` array may consist of URL strings or an array of\n   * URL string + `requestInit` object (the same as you'd pass to `fetch()`).\n   *\n   * ```\n   * {\n   *   type: 'CACHE_URLS',\n   *   payload: {\n   *     urlsToCache: [\n   *       './script1.js',\n   *       './script2.js',\n   *       ['./script3.js', {mode: 'no-cors'}],\n   *     ],\n   *   },\n   * }\n   * ```\n   */", "url": "https://github.com/qwacko/sveltekit-lucia-starter/blob/2c01cb76c8607c2a9f4e80f78dfd1e9def43768e/dev-dist/workbox-39884a30.js#L748-L781", "sha": "2c01cb76c8607c2a9f4e80f78dfd1e9def43768e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "waitUntil", "code": "function waitUntil(event, asyncFn) {\n    const returnPromise = asyncFn();\n    event.waitUntil(returnPromise);\n    return returnPromise;\n  }", "docstring": "/*\n    Copyright 2020 Google LLC\n    Use of this source code is governed by an MIT-style\n    license that can be found in the LICENSE file or at\n    https://opensource.org/licenses/MIT.\n  */", "url": "https://github.com/qwacko/sveltekit-lucia-starter/blob/2c01cb76c8607c2a9f4e80f78dfd1e9def43768e/dev-dist/workbox-39884a30.js#L1247-L1251", "sha": "2c01cb76c8607c2a9f4e80f78dfd1e9def43768e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "PrecacheController.getCachedURLs", "code": "getCachedURLs() {\n    return [...this._urlsToCacheKeys.keys()];\n  }", "docstring": "/**\n   * Returns a list of all the URLs that have been precached by the current\n   * service worker.\n   *\n   * @return {Array<string>} The precached URLs.\n   */", "url": "https://github.com/qwacko/sveltekit-lucia-starter/blob/2c01cb76c8607c2a9f4e80f78dfd1e9def43768e/dev-dist/workbox-dcc48fc1.js#L2854-L2856", "sha": "2c01cb76c8607c2a9f4e80f78dfd1e9def43768e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isEmpty$1", "code": "function isEmpty$1(value) {\n    if (value == null) {\n      return true;\n    }\n    if (isArrayLike(value) && (isArray$3(value) || typeof value == 'string' || typeof value.splice == 'function' || isBuffer(value) || isTypedArray(value) || isArguments$1(value))) {\n      return !value.length;\n    }\n    var tag = getTag(value);\n    if (tag == mapTag || tag == setTag) {\n      return !value.size;\n    }\n    if (isPrototype(value)) {\n      return !baseKeys(value).length;\n    }\n    for (var key in value) {\n      if (hasOwnProperty.call(value, key)) {\n        return false;\n      }\n    }\n    return true;\n  }", "docstring": "/**\n   * Checks if `value` is an empty object, collection, map, or set.\n   *\n   * Objects are considered empty if they have no own enumerable string keyed\n   * properties.\n   *\n   * Array-like values such as `arguments` objects, arrays, buffers, strings, or\n   * jQuery-like collections are considered empty if they have a `length` of `0`.\n   * Similarly, maps and sets are considered empty if they have a `size` of `0`.\n   *\n   * @static\n   * @memberOf _\n   * @since 0.1.0\n   * @category Lang\n   * @param {*} value The value to check.\n   * @returns 
{boolean} Returns `true` if `value` is empty, else `false`.\n * @example\n *\n * _.isEmpty(null);\n * // => true\n *\n * _.isEmpty(true);\n * // => true\n *\n * _.isEmpty(1);\n * // => true\n *\n * _.isEmpty([1, 2, 3]);\n * // => false\n *\n * _.isEmpty({ 'a': 1 });\n * // => false\n */", "url": "https://github.com/guidone/lets-form/blob/793271834cca296f28b6321d258821b8486061f1/dist/react-antd-umd/main.js#L4089-L4109", "sha": "793271834cca296f28b6321d258821b8486061f1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "initCloneArray$1", "code": "function initCloneArray$1(array) {\n var length = array.length,\n result = new array.constructor(length);\n\n // Add properties assigned by `RegExp#exec`.\n if (length && typeof array[0] == 'string' && hasOwnProperty$2.call(array, 'index')) {\n result.index = array.index;\n result.input = array.input;\n }\n return result;\n }", "docstring": "/**\n * Initializes an array clone.\n *\n * @private\n * @param {Array} array The array to clone.\n * @returns {Array} Returns the initialized clone.\n */", "url": "https://github.com/guidone/lets-form/blob/793271834cca296f28b6321d258821b8486061f1/dist/react-bootstrap-umd/main.js#L2771-L2781", "sha": "793271834cca296f28b6321d258821b8486061f1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "stringToArray$2", "code": "function stringToArray$2(string) {\n return hasUnicode$1(string) ? unicodeToArray(string) : asciiToArray(string);\n }", "docstring": "/**\n * Converts `string` to an array.\n *\n * @private\n * @param {string} string The string to convert.\n * @returns {Array} Returns the converted array.\n */", "url": "https://github.com/guidone/lets-form/blob/793271834cca296f28b6321d258821b8486061f1/dist/react-bootstrap-umd/main.js#L19958-L19960", "sha": "793271834cca296f28b6321d258821b8486061f1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "memoize$1", "code": "function memoize$1(func, resolver) {\n if (typeof func != 'function' || resolver != null && typeof resolver != 'function') {\n throw new TypeError(FUNC_ERROR_TEXT);\n }\n var memoized = function () {\n var args = arguments,\n key = resolver ? resolver.apply(this, args) : args[0],\n cache = memoized.cache;\n if (cache.has(key)) {\n return cache.get(key);\n }\n var result = func.apply(this, args);\n memoized.cache = cache.set(key, result) || cache;\n return result;\n };\n memoized.cache = new (memoize$1.Cache || MapCache$1)();\n return memoized;\n }", "docstring": "/**\n * Creates a function that memoizes the result of `func`. If `resolver` is\n * provided, it determines the cache key for storing the result based on the\n * arguments provided to the memoized function. By default, the first argument\n * provided to the memoized function is used as the map cache key. The `func`\n * is invoked with the `this` binding of the memoized function.\n *\n * **Note:** The cache is exposed as the `cache` property on the memoized\n * function. 
Its creation may be customized by replacing the `_.memoize.Cache`\n * constructor with one whose instances implement the\n * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object)\n * method interface of `clear`, `delete`, `get`, `has`, and `set`.\n *\n * @static\n * @memberOf _\n * @since 0.1.0\n * @category Function\n * @param {Function} func The function to have its output memoized.\n * @param {Function} [resolver] The function to resolve the cache key.\n * @returns {Function} Returns the new memoized function.\n * @example\n *\n * var object = { 'a': 1, 'b': 2 };\n * var other = { 'c': 3, 'd': 4 };\n *\n * var values = _.memoize(_.values);\n * values(object);\n * // => [1, 2]\n *\n * values(other);\n * // => [3, 4]\n *\n * object.a = 2;\n * values(object);\n * // => [1, 2]\n *\n * // Modify the result cache.\n * values.cache.set(object, ['a', 'b']);\n * values(object);\n * // => ['a', 'b']\n *\n * // Replace `_.memoize.Cache`.\n * _.memoize.Cache = WeakMap;\n */", "url": "https://github.com/guidone/lets-form/blob/793271834cca296f28b6321d258821b8486061f1/dist/react-rsuite5-umd/main.js#L3333-L3350", "sha": "793271834cca296f28b6321d258821b8486061f1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "baseFindIndex$1", "code": "function baseFindIndex$1(array, predicate, fromIndex, fromRight) {\n var length = array.length,\n index = fromIndex + (fromRight ? 1 : -1);\n while (fromRight ? index-- : ++index < length) {\n if (predicate(array[index], index, array)) {\n return index;\n }\n }\n return -1;\n}", "docstring": "/**\n * The base implementation of `_.findIndex` and `_.findLastIndex` without\n * support for iteratee shorthands.\n *\n * @private\n * @param {Array} array The array to inspect.\n * @param {Function} predicate The function invoked per iteration.\n * @param {number} fromIndex The index to search from.\n * @param {boolean} [fromRight] Specify iterating from right to left.\n * @returns {number} Returns the index of the matched value, else `-1`.\n */", "url": "https://github.com/guidone/lets-form/blob/793271834cca296f28b6321d258821b8486061f1/dist/utils-esm/index.js#L4769-L4778", "sha": "793271834cca296f28b6321d258821b8486061f1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "listCacheHas$1", "code": "function listCacheHas$1(key) {\n return assocIndexOf$1(this.__data__, key) > -1;\n }", "docstring": "/**\n * Checks if a list cache value for `key` exists.\n *\n * @private\n * @name has\n * @memberOf ListCache\n * @param {string} key The key of the entry to check.\n * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.\n */", "url": "https://github.com/guidone/lets-form/blob/793271834cca296f28b6321d258821b8486061f1/dist/utils-umd/main.js#L1561-L1563", "sha": "793271834cca296f28b6321d258821b8486061f1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isIndex$3", "code": "function isIndex$3(value, length) {\n var type = typeof value;\n length = length == null ? 
MAX_SAFE_INTEGER$1 : length;\n    return !!length && (type == 'number' || type != 'symbol' && reIsUint.test(value)) && value > -1 && value % 1 == 0 && value < length;\n  }", "docstring": "/**\n   * Checks if `value` is a valid array-like index.\n   *\n   * @private\n   * @param {*} value The value to check.\n   * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index.\n   * @returns {boolean} Returns `true` if `value` is a valid index, else `false`.\n   */", "url": "https://github.com/guidone/lets-form/blob/793271834cca296f28b6321d258821b8486061f1/dist/utils-umd/main.js#L2047-L2051", "sha": "793271834cca296f28b6321d258821b8486061f1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "bnpFromString", "code": "function bnpFromString(s,b) {\n  var k;\n  if(b == 16) k = 4;\n  else if(b == 8) k = 3;\n  else if(b == 256) k = 8; // byte array\n  else if(b == 2) k = 1;\n  else if(b == 32) k = 5;\n  else if(b == 4) k = 2;\n  else { this.fromRadix(s,b); return; }\n  this.t = 0;\n  this.s = 0;\n  var i = s.length, mi = false, sh = 0;\n  while(--i >= 0) {\n    var x = (k==8)?s[i]&0xff:intAt(s,i);\n    if(x < 0) {\n      if(s.charAt(i) == \"-\") mi = true;\n      continue;\n    }\n    mi = false;\n    if(sh == 0)\n      this[this.t++] = x;\n    else if(sh+k > this.DB) {\n      this[this.t-1] |= (x&((1<<(this.DB-sh))-1))<<sh;\n      this[this.t++] = (x>>(this.DB-sh));\n    }\n    else\n      this[this.t-1] |= x<<sh;\n    sh += k;\n    if(sh >= this.DB) sh -= this.DB;\n  }\n  if(k == 8 && (s[0]&0x80) != 0) {\n    this.s = -1;\n    if(sh > 0) this[this.t-1] |= ((1<<(this.DB-sh))-1)< -1)\n    { signalCursorActivity(cm) }\n\n  updateDoc(doc, change, spans, estimateHeight(cm))\n\n  if (!cm.options.lineWrapping) {\n    doc.iter(checkWidthStart, from.line + change.text.length, function (line) {\n      var len = lineLength(line)\n      if (len > display.maxLineLength) {\n        display.maxLine = line\n        display.maxLineLength = len\n        display.maxLineChanged = true\n        recomputeMaxLength = false\n      }\n    })\n    if (recomputeMaxLength) { cm.curOp.updateMaxLine = true }\n  }\n\n  // Adjust frontier, schedule worker\n  doc.frontier = Math.min(doc.frontier, from.line)\n  startWorker(cm, 400)\n\n  var lendiff = change.text.length - (to.line - from.line) - 1\n  // Remember that these lines changed, for updating the display\n  if (change.full)\n    { regChange(cm) }\n  else if (from.line == to.line && change.text.length == 1 && !isWholeLineUpdate(cm.doc, change))\n    { regLineChange(cm, from.line, \"text\") }\n  else\n    { regChange(cm, from.line, to.line + 1, lendiff) }\n\n  var changesHandler = hasHandler(cm, \"changes\"), changeHandler = hasHandler(cm, \"change\")\n  if (changeHandler || changesHandler) {\n    var obj = {\n      from: from, to: to,\n      text: change.text,\n      removed: change.removed,\n      origin: change.origin\n    }\n    if (changeHandler) { signalLater(cm, \"change\", cm, obj) }\n    if (changesHandler) { (cm.curOp.changeObjs || (cm.curOp.changeObjs = [])).push(obj) }\n  }\n  cm.display.selForContextMenu = null\n}", "docstring": "// Handle the interaction of a change to a document with the editor", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/_content/tour/fre/static/lib/codemirror/lib/codemirror.js#L5195-L5252", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "attachLocalSpans", "code": "function attachLocalSpans(doc, change, from, to) {\n  var existing = change[\"spans_\" + doc.id], n = 0\n  doc.iter(Math.max(doc.first, from), Math.min(doc.first + doc.size, to), function (line) {\n    if (line.markedSpans)\n      { (existing || (existing = 
change[\"spans_\" + doc.id] = {}))[n] = line.markedSpans }\n ++n\n })\n}", "docstring": "// Used to store marked span information in the history.", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/_content/tour/ita/static/lib/codemirror/lib/codemirror.js#L4724-L4731", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "countColumn", "code": "function countColumn(string, end, tabSize, startIndex, startValue) {\n if (end == null) {\n end = string.search(/[^\\s\\u00a0]/)\n if (end == -1) { end = string.length }\n }\n for (var i = startIndex || 0, n = startValue || 0;;) {\n var nextTab = string.indexOf(\"\\t\", i)\n if (nextTab < 0 || nextTab >= end)\n { return n + (end - i) }\n n += nextTab - i\n n += tabSize - (n % tabSize)\n i = nextTab + 1\n }\n}", "docstring": "// Counts the column offset in a string, taking tabs into account.", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/_content/tour/pol/static/lib/codemirror/lib/codemirror.js#L161-L174", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "handleCharBinding", "code": "function handleCharBinding(cm, e, ch) {\n return dispatchKey(cm, \"'\" + ch + \"'\", e, function (b) { return doHandleBinding(cm, b, true); })\n}", "docstring": "// Handle a key from the keypress event", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/_content/tour/por/static/lib/codemirror/lib/codemirror.js#L6858-L6860", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LinkedList", "code": "function LinkedList() {\n\t\t/** @type {LinkedListNode} */\n\t\tvar head = { value: null, prev: null, next: null };\n\t\t/** @type {LinkedListNode} */\n\t\tvar tail = { value: null, prev: head, next: null };\n\t\thead.next = tail;\n\n\t\t/** @type {LinkedListNode} */\n\t\tthis.head = head;\n\t\t/** @type {LinkedListNode} */\n\t\tthis.tail = tail;\n\t\tthis.length = 0;\n\t}", "docstring": "/**\n\t * @typedef LinkedListNode\n\t * @property {T} value\n\t * @property {LinkedListNode | null} prev The previous node.\n\t * @property {LinkedListNode | null} next The next node.\n\t * @template T\n\t * @private\n\t */", "url": "https://github.com/royerlab/napari-chatgpt/blob/59b996a8547bd315004773bee942a2fae925ae43/src/napari_chatgpt/chat_server/static/prism.js#L1078-L1090", "sha": "59b996a8547bd315004773bee942a2fae925ae43"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LazyError.constructor", "code": "constructor(headline, more) {\n super(stripAnsi(headline))\n this.headline = headline\n this.more = more\n }", "docstring": "/**\n * @param {string} headline\n * @param {{ error?: Error, detail?: string }} [more]\n */", "url": "https://github.com/ds300/lazyrepo/blob/c67eda9d332b85c135f8a69a23570a67b3b1f698/src/logger/LazyError.js#L9-L13", "sha": "c67eda9d332b85c135f8a69a23570a67b3b1f698"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ensureCtor", "code": "function ensureCtor (comp, base) {\n if (\n comp.__esModule ||\n (hasSymbol && comp[Symbol.toStringTag] === 'Module')\n ) {\n comp = comp.default;\n }\n return isObject(comp)\n ? 
base.extend(comp)\n : comp\n }", "docstring": "/* */", "url": "https://github.com/sing-web/x-ui/blob/0cd2536025dd3115fa3809c04fd6b370b3f48704/web/assets/vue@2.6.12/vue.runtime.js#L3584-L3594", "sha": "0cd2536025dd3115fa3809c04fd6b370b3f48704"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "length", "code": "function length(val) {\n return val.toString().length;\n}", "docstring": "/**\n * Get the string length of `val`\n */", "url": "https://github.com/2833844911/cy_jsvmp/blob/f714b87f46b8c638924c697ca8ef6b3745e3a729/node_modules/expand-range/node_modules/fill-range/index.js#L406-L408", "sha": "f714b87f46b8c638924c697ca8ef6b3745e3a729"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "baseIndexOfWith", "code": "function baseIndexOfWith(array, value, fromIndex, comparator) {\n var index = fromIndex - 1,\n length = array.length;\n\n while (++index < length) {\n if (comparator(array[index], value)) {\n return index;\n }\n }\n return -1;\n}", "docstring": "/**\n * This function is like `baseIndexOf` except that it accepts a comparator.\n *\n * @private\n * @param {Array} array The array to inspect.\n * @param {*} value The value to search for.\n * @param {number} fromIndex The index to search from.\n * @param {Function} comparator The comparator invoked per element.\n * @returns {number} Returns the index of the matched value, else `-1`.\n */", "url": "https://github.com/2833844911/cy_jsvmp/blob/f714b87f46b8c638924c697ca8ef6b3745e3a729/node_modules/lodash/_baseIndexOfWith.js#L11-L21", "sha": "f714b87f46b8c638924c697ca8ef6b3745e3a729"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isIndex", "code": "function isIndex(value, length) {\n var type = typeof value;\n length = length == null ? 
MAX_SAFE_INTEGER : length;\n\n return !!length &&\n (type == 'number' ||\n (type != 'symbol' && reIsUint.test(value))) &&\n (value > -1 && value % 1 == 0 && value < length);\n }", "docstring": "/**\n * Checks if `value` is a valid array-like index.\n *\n * @private\n * @param {*} value The value to check.\n * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index.\n * @returns {boolean} Returns `true` if `value` is a valid index, else `false`.\n */", "url": "https://github.com/2833844911/cy_jsvmp/blob/f714b87f46b8c638924c697ca8ef6b3745e3a729/node_modules/lodash/lodash.js#L6324-L6332", "sha": "f714b87f46b8c638924c697ca8ef6b3745e3a729"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "baseCreate", "code": "function baseCreate(prototype) {\n if (!isObject(prototype)) return {};\n if (nativeCreate) return nativeCreate(prototype);\n var Ctor = ctor();\n Ctor.prototype = prototype;\n var result = new Ctor;\n Ctor.prototype = null;\n return result;\n }", "docstring": "// An internal function for creating a new object that inherits from another.", "url": "https://github.com/spotify/voyager/blob/8e724398a57d4eca63d6e0dc2cd051d172fea95a/docs/python/_static/underscore-1.13.1.js#L610-L618", "sha": "8e724398a57d4eca63d6e0dc2cd051d172fea95a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "preventSelection", "code": "function preventSelection(el) {\n el.style.userSelect = 'none';\n el.style.webkitUserSelect = 'none';\n el.addEventListener('selectstart', preventDefault);\n }", "docstring": "/* Selection\n ----------------------------------------------------------------------------------------------------------------------*/", "url": "https://github.com/ProbiusOfficial/Hello-CTF/blob/d51bc86bafa737be4798db83b2a5b253f9f0842c/docs/javascripts/FullCalendar.js#L354-L358", "sha": "d51bc86bafa737be4798db83b2a5b253f9f0842c"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "kvjs.zintercard", "code": "zintercard(...keys) {\n const intersection = this.ZINTER(...keys);\n return intersection.size;\n }", "docstring": "/**\n * Get the number of members in the intersection between the given sorted sets stored at the specified keys.\n *\n * @param {...string} keys - The keys where the sorted sets are stored.\n * @returns {number} - The number of members in the intersection.\n */", "url": "https://github.com/HeyPuter/kv.js/blob/749e24c6868b3f048e8858e45a35d79b53ad7eb6/kv.js#L1852-L1855", "sha": "749e24c6868b3f048e8858e45a35d79b53ad7eb6"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "toType", "code": "function toType(obj) {\n if (obj === null || typeof obj === 'undefined') {\n return \"\" + obj;\n }\n\n return {}.toString.call(obj).match(/\\s([a-z]+)/i)[1].toLowerCase();\n }", "docstring": "// Shoutout AngusCroll (https://goo.gl/pxwQGp)", "url": "https://github.com/openkoda/openkoda/blob/d86335959f5fb1da2fa58a9a5c7f02065d322975/openkoda/src/main/resources/public/vendor/bootstrap/js/bootstrap.bundle.js#L70-L76", "sha": "d86335959f5fb1da2fa58a9a5c7f02065d322975"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "createFxNow", "code": "function createFxNow() {\n\twindow.setTimeout( function() {\n\t\tfxNow = undefined;\n\t} );\n\treturn ( fxNow = jQuery.now() );\n}", "docstring": "// Animations created synchronously will run synchronously", "url": 
"https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/plugins/jquery/jquery.js#L7498-L7503", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "cleanupStylesWhenDeleting", "code": "function cleanupStylesWhenDeleting() {\n var doc = editor.getDoc(),\n dom = editor.dom,\n selection = editor.selection;\n var MutationObserver = window.MutationObserver,\n olderWebKit, dragStartRng;\n\n // Add mini polyfill for older WebKits\n // TODO: Remove this when old Safari versions gets updated\n if (!MutationObserver) {\n olderWebKit = true;\n\n MutationObserver = function() {\n var records = [],\n target;\n\n function nodeInsert(e) {\n var target = e.relatedNode || e.target;\n records.push({ target: target, addedNodes: [target] });\n }\n\n function attrModified(e) {\n var target = e.relatedNode || e.target;\n records.push({ target: target, attributeName: e.attrName });\n }\n\n this.observe = function(node) {\n target = node;\n target.addEventListener('DOMSubtreeModified', nodeInsert, false);\n target.addEventListener('DOMNodeInsertedIntoDocument', nodeInsert, false);\n target.addEventListener('DOMNodeInserted', nodeInsert, false);\n target.addEventListener('DOMAttrModified', attrModified, false);\n };\n\n this.disconnect = function() {\n target.removeEventListener('DOMSubtreeModified', nodeInsert, false);\n target.removeEventListener('DOMNodeInsertedIntoDocument', nodeInsert, false);\n target.removeEventListener('DOMNodeInserted', nodeInsert, false);\n target.removeEventListener('DOMAttrModified', attrModified, false);\n };\n\n this.takeRecords = function() {\n return records;\n };\n };\n }\n\n function isTrailingBr(node) {\n var blockElements = dom.schema.getBlockElements(),\n rootNode = editor.getBody();\n\n if (node.nodeName != 'BR') {\n return false;\n }\n\n for (; node != rootNode && !blockElements[node.nodeName]; node = node.parentNode) {\n if (node.nextSibling) {\n return false;\n }\n }\n\n return true;\n }\n\n function isSiblingsIgnoreWhiteSpace(node1, node2) {\n var node;\n\n for (node = node1.nextSibling; node && node != node2; node = node.nextSibling) {\n if (node.nodeType == 3 && $.trim(node.data).length === 0) {\n continue;\n }\n\n if (node !== node2) {\n return false;\n }\n }\n\n return node === node2;\n }\n\n function findCaretNode(node, forward, startNode) {\n var walker, current, nonEmptyElements;\n\n nonEmptyElements = dom.schema.getNonEmptyElements();\n\n walker = new TreeWalker(startNode || node, node);\n\n while ((current = walker[forward ? 
'next' : 'prev']())) {\n if (nonEmptyElements[current.nodeName] && !isTrailingBr(current)) {\n return current;\n }\n\n if (current.nodeType == 3 && current.data.length > 0) {\n return current;\n }\n }\n }\n\n function deleteRangeBetweenTextBlocks(rng) {\n var startBlock, endBlock, caretNodeBefore, caretNodeAfter, textBlockElements;\n\n if (rng.collapsed) {\n return;\n }\n\n startBlock = dom.getParent(RangeUtils.getNode(rng.startContainer, rng.startOffset), dom.isBlock);\n endBlock = dom.getParent(RangeUtils.getNode(rng.endContainer, rng.endOffset), dom.isBlock);\n textBlockElements = editor.schema.getTextBlockElements();\n\n if (startBlock == endBlock) {\n return;\n }\n\n if (!textBlockElements[startBlock.nodeName] || !textBlockElements[endBlock.nodeName]) {\n return;\n }\n\n if (dom.getContentEditable(startBlock) === \"false\" || dom.getContentEditable(endBlock) === \"false\") {\n return;\n }\n\n rng.deleteContents();\n\n caretNodeBefore = findCaretNode(startBlock, false);\n caretNodeAfter = findCaretNode(endBlock, true);\n\n if (!dom.isEmpty(endBlock)) {\n $(startBlock).append(endBlock.childNodes);\n }\n\n $(endBlock).remove();\n\n if (caretNodeBefore) {\n if (caretNodeBefore.nodeType == 1) {\n if (caretNodeBefore.nodeName == \"BR\") {\n rng.setStartBefore(caretNodeBefore);\n rng.setEndBefore(caretNodeBefore);\n } else {\n rng.setStartAfter(caretNodeBefore);\n rng.setEndAfter(caretNodeBefore);\n }\n } else {\n rng.setStart(caretNodeBefore, caretNodeBefore.data.length);\n rng.setEnd(caretNodeBefore, caretNodeBefore.data.length);\n }\n } else if (caretNodeAfter) {\n if (caretNodeAfter.nodeType == 1) {\n rng.setStartBefore(caretNodeAfter);\n rng.setEndBefore(caretNodeAfter);\n } else {\n rng.setStart(caretNodeAfter, 0);\n rng.setEnd(caretNodeAfter, 0);\n }\n }\n\n selection.setRng(rng);\n\n return true;\n }\n\n function expandBetweenBlocks(rng, isForward) {\n var caretNode, targetCaretNode, textBlock, targetTextBlock, container, offset;\n\n if (!rng.collapsed) {\n return rng;\n }\n\n container = rng.startContainer;\n offset = rng.startOffset;\n\n if (container.nodeType == 3) {\n if (isForward) {\n if (offset < container.data.length) {\n return rng;\n }\n } else {\n if (offset > 0) {\n return rng;\n }\n }\n }\n\n caretNode = RangeUtils.getNode(rng.startContainer, rng.startOffset);\n textBlock = dom.getParent(caretNode, dom.isBlock);\n targetCaretNode = findCaretNode(editor.getBody(), isForward, caretNode);\n targetTextBlock = dom.getParent(targetCaretNode, dom.isBlock);\n\n if (!caretNode || !targetCaretNode) {\n return rng;\n }\n\n if (targetTextBlock && textBlock != targetTextBlock) {\n if (!isForward) {\n if (!isSiblingsIgnoreWhiteSpace(targetTextBlock, textBlock)) {\n return rng;\n }\n\n if (targetCaretNode.nodeType == 1) {\n if (targetCaretNode.nodeName == \"BR\") {\n rng.setStartBefore(targetCaretNode);\n } else {\n rng.setStartAfter(targetCaretNode);\n }\n } else {\n rng.setStart(targetCaretNode, targetCaretNode.data.length);\n }\n\n if (caretNode.nodeType == 1) {\n rng.setEnd(caretNode, 0);\n } else {\n rng.setEndBefore(caretNode);\n }\n } else {\n if (!isSiblingsIgnoreWhiteSpace(textBlock, targetTextBlock)) {\n return rng;\n }\n\n if (caretNode.nodeType == 1) {\n if (caretNode.nodeName == \"BR\") {\n rng.setStartBefore(caretNode);\n } else {\n rng.setStartAfter(caretNode);\n }\n } else {\n rng.setStart(caretNode, caretNode.data.length);\n }\n\n if (targetCaretNode.nodeType == 1) {\n rng.setEnd(targetCaretNode, 0);\n } else {\n rng.setEndBefore(targetCaretNode);\n }\n }\n }\n\n 
return rng;\n }\n\n function handleTextBlockMergeDelete(isForward) {\n var rng = selection.getRng();\n\n rng = expandBetweenBlocks(rng, isForward);\n\n if (deleteRangeBetweenTextBlocks(rng)) {\n return true;\n }\n }\n\n /**\n * This retains the formatting if the last character is to be deleted.\n *\n * Backspace on this:

    a|

    would become

    |

    in WebKit.\n * With this patch:

    |

    \n */\n function handleLastBlockCharacterDelete(isForward, rng) {\n var path, blockElm, newBlockElm, clonedBlockElm, sibling,\n container, offset, br, currentFormatNodes;\n\n function cloneTextBlockWithFormats(blockElm, node) {\n currentFormatNodes = $(node).parents().filter(function(idx, node) {\n return !!editor.schema.getTextInlineElements()[node.nodeName];\n });\n\n newBlockElm = blockElm.cloneNode(false);\n\n currentFormatNodes = Tools.map(currentFormatNodes, function(formatNode) {\n formatNode = formatNode.cloneNode(false);\n\n if (newBlockElm.hasChildNodes()) {\n formatNode.appendChild(newBlockElm.firstChild);\n newBlockElm.appendChild(formatNode);\n } else {\n newBlockElm.appendChild(formatNode);\n }\n\n newBlockElm.appendChild(formatNode);\n\n return formatNode;\n });\n\n if (currentFormatNodes.length) {\n br = dom.create('br');\n currentFormatNodes[0].appendChild(br);\n dom.replace(newBlockElm, blockElm);\n\n rng.setStartBefore(br);\n rng.setEndBefore(br);\n editor.selection.setRng(rng);\n\n return br;\n }\n\n return null;\n }\n\n function isTextBlock(node) {\n return node && editor.schema.getTextBlockElements()[node.tagName];\n }\n\n if (!rng.collapsed) {\n return;\n }\n\n container = rng.startContainer;\n offset = rng.startOffset;\n blockElm = dom.getParent(container, dom.isBlock);\n if (!isTextBlock(blockElm)) {\n return;\n }\n\n if (container.nodeType == 1) {\n container = container.childNodes[offset];\n if (container && container.tagName != 'BR') {\n return;\n }\n\n if (isForward) {\n sibling = blockElm.nextSibling;\n } else {\n sibling = blockElm.previousSibling;\n }\n\n if (dom.isEmpty(blockElm) && isTextBlock(sibling) && dom.isEmpty(sibling)) {\n if (cloneTextBlockWithFormats(blockElm, container)) {\n dom.remove(sibling);\n return true;\n }\n }\n } else if (container.nodeType == 3) {\n path = NodePath.create(blockElm, container);\n clonedBlockElm = blockElm.cloneNode(true);\n container = NodePath.resolve(clonedBlockElm, path);\n\n if (isForward) {\n if (offset >= container.data.length) {\n return;\n }\n\n container.deleteData(offset, 1);\n } else {\n if (offset <= 0) {\n return;\n }\n\n container.deleteData(offset - 1, 1);\n }\n\n if (dom.isEmpty(clonedBlockElm)) {\n return cloneTextBlockWithFormats(blockElm, container);\n }\n }\n }\n\n function customDelete(isForward) {\n var mutationObserver, rng, caretElement;\n\n if (handleTextBlockMergeDelete(isForward)) {\n return;\n }\n\n Tools.each(editor.getBody().getElementsByTagName('*'), function(elm) {\n // Mark existing spans\n if (elm.tagName == 'SPAN') {\n elm.setAttribute('mce-data-marked', 1);\n }\n\n // Make sure all elements has a data-mce-style attribute\n if (!elm.hasAttribute('data-mce-style') && elm.hasAttribute('style')) {\n editor.dom.setAttrib(elm, 'style', editor.dom.getAttrib(elm, 'style'));\n }\n });\n\n // Observe added nodes and style attribute changes\n mutationObserver = new MutationObserver(function() {});\n mutationObserver.observe(editor.getDoc(), {\n childList: true,\n attributes: true,\n subtree: true,\n attributeFilter: ['style']\n });\n\n editor.getDoc().execCommand(isForward ? 
'ForwardDelete' : 'Delete', false, null);\n\n rng = editor.selection.getRng();\n caretElement = rng.startContainer.parentNode;\n\n Tools.each(mutationObserver.takeRecords(), function(record) {\n if (!dom.isChildOf(record.target, editor.getBody())) {\n return;\n }\n\n // Restore style attribute to previous value\n if (record.attributeName == \"style\") {\n var oldValue = record.target.getAttribute('data-mce-style');\n\n if (oldValue) {\n record.target.setAttribute(\"style\", oldValue);\n } else {\n record.target.removeAttribute(\"style\");\n }\n }\n\n // Remove all spans that aren't marked and retain selection\n Tools.each(record.addedNodes, function(node) {\n if (node.nodeName == \"SPAN\" && !node.getAttribute('mce-data-marked')) {\n var offset, container;\n\n if (node == caretElement) {\n offset = rng.startOffset;\n container = node.firstChild;\n }\n\n dom.remove(node, true);\n\n if (container) {\n rng.setStart(container, offset);\n rng.setEnd(container, offset);\n editor.selection.setRng(rng);\n }\n }\n });\n });\n\n mutationObserver.disconnect();\n\n // Remove any left over marks\n Tools.each(editor.dom.select('span[mce-data-marked]'), function(span) {\n span.removeAttribute('mce-data-marked');\n });\n }\n\n editor.on('keydown', function(e) {\n var isForward = e.keyCode == DELETE,\n isMetaOrCtrl = e.ctrlKey || e.metaKey;\n\n if (!isDefaultPrevented(e) && (isForward || e.keyCode == BACKSPACE)) {\n var rng = editor.selection.getRng(),\n container = rng.startContainer,\n offset = rng.startOffset;\n\n // Shift+Delete is cut\n if (isForward && e.shiftKey) {\n return;\n }\n\n if (handleLastBlockCharacterDelete(isForward, rng)) {\n e.preventDefault();\n return;\n }\n\n // Ignore non meta delete in the where there is text before/after the caret\n if (!isMetaOrCtrl && rng.collapsed && container.nodeType == 3) {\n if (isForward ? offset < container.data.length : offset > 0) {\n return;\n }\n }\n\n e.preventDefault();\n\n if (isMetaOrCtrl) {\n editor.selection.getSel().modify(\"extend\", isForward ? \"forward\" : \"backward\", e.metaKey ? 
\"lineboundary\" : \"word\");\n }\n\n customDelete(isForward);\n }\n });\n\n // Handle case where text is deleted by typing over\n editor.on('keypress', function(e) {\n if (!isDefaultPrevented(e) && !selection.isCollapsed() && e.charCode > 31 && !VK.metaKeyPressed(e)) {\n var rng, currentFormatNodes, fragmentNode, blockParent, caretNode, charText;\n\n rng = editor.selection.getRng();\n charText = String.fromCharCode(e.charCode);\n e.preventDefault();\n\n // Keep track of current format nodes\n currentFormatNodes = $(rng.startContainer).parents().filter(function(idx, node) {\n return !!editor.schema.getTextInlineElements()[node.nodeName];\n });\n\n customDelete(true);\n\n // Check if the browser removed them\n currentFormatNodes = currentFormatNodes.filter(function(idx, node) {\n return !$.contains(editor.getBody(), node);\n });\n\n // Then re-add them\n if (currentFormatNodes.length) {\n fragmentNode = dom.createFragment();\n\n currentFormatNodes.each(function(idx, formatNode) {\n formatNode = formatNode.cloneNode(false);\n\n if (fragmentNode.hasChildNodes()) {\n formatNode.appendChild(fragmentNode.firstChild);\n fragmentNode.appendChild(formatNode);\n } else {\n caretNode = formatNode;\n fragmentNode.appendChild(formatNode);\n }\n\n fragmentNode.appendChild(formatNode);\n });\n\n caretNode.appendChild(editor.getDoc().createTextNode(charText));\n\n // Prevent edge case where older WebKit would add an extra BR element\n blockParent = dom.getParent(rng.startContainer, dom.isBlock);\n if (dom.isEmpty(blockParent)) {\n $(blockParent).empty().append(fragmentNode);\n } else {\n rng.insertNode(fragmentNode);\n }\n\n rng.setStart(caretNode.firstChild, 1);\n rng.setEnd(caretNode.firstChild, 1);\n editor.selection.setRng(rng);\n } else {\n editor.selection.setContent(charText);\n }\n }\n });\n\n editor.addCommand('Delete', function() {\n customDelete();\n });\n\n editor.addCommand('ForwardDelete', function() {\n customDelete(true);\n });\n\n // Older WebKits doesn't properly handle the clipboard so we can't add the rest\n if (olderWebKit) {\n return;\n }\n\n editor.on('dragstart', function(e) {\n dragStartRng = selection.getRng();\n setMceInternalContent(e);\n });\n\n editor.on('drop', function(e) {\n if (!isDefaultPrevented(e)) {\n var internalContent = getMceInternalContent(e);\n\n if (internalContent) {\n e.preventDefault();\n\n // Safari has a weird issue where drag/dropping images sometimes\n // produces a green plus icon. 
When this happens the caretRangeFromPoint\n // will return \"null\" even though the x, y coordinate is correct.\n // But if we detach the insert from the drop event we will get a proper range\n Delay.setEditorTimeout(editor, function() {\n var pointRng = RangeUtils.getCaretRangeFromPoint(e.x, e.y, doc);\n\n if (dragStartRng) {\n selection.setRng(dragStartRng);\n dragStartRng = null;\n }\n\n customDelete();\n selection.setRng(pointRng);\n insertClipboardContents(internalContent.html);\n });\n }\n }\n });\n\n editor.on('cut', function(e) {\n if (!isDefaultPrevented(e) && e.clipboardData && !editor.selection.isCollapsed()) {\n e.preventDefault();\n e.clipboardData.clearData();\n e.clipboardData.setData('text/html', editor.selection.getContent());\n e.clipboardData.setData('text/plain', editor.selection.getContent({ format: 'text' }));\n\n // Needed delay for https://code.google.com/p/chromium/issues/detail?id=363288#c3\n // Nested delete/forwardDelete not allowed on execCommand(\"cut\")\n // This is ugly but not sure how to work around it otherwise\n Delay.setEditorTimeout(editor, function() {\n customDelete(true);\n });\n }\n });\n }", "docstring": "/**\n * Fixes a WebKit bug when deleting contents using backspace or delete key.\n * WebKit will produce a span element if you delete across two block elements.\n *\n * Example:\n *

    a

    |b

    \n *\n * Will produce this on backspace:\n *

    a\">b

    \n *\n * This fixes the backspace to produce:\n *

    a|b

    \n *\n * See bug: https://bugs.webkit.org/show_bug.cgi?id=45784\n *\n * This fixes the following delete scenarios:\n * 1. Delete by pressing backspace key.\n * 2. Delete by pressing delete key.\n * 3. Delete by pressing backspace key with ctrl/cmd (Word delete).\n * 4. Delete by pressing delete key with ctrl/cmd (Word delete).\n * 5. Delete by drag/dropping contents inside the editor.\n * 6. Delete by using Cut Ctrl+X/Cmd+X.\n * 7. Delete by selecting contents and writing a character.\n *\n * This code is a ugly hack since writing full custom delete logic for just this bug\n * fix seemed like a huge task. I hope we can remove this before the year 2030.\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/plugins/tinymce/tinymce.jquery.min.js#L32436-L33021", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isDefaultPrevented", "code": "function isDefaultPrevented(e) {\n\t\t\treturn e.isDefaultPrevented();\n\t\t}", "docstring": "/**\n\t\t * Returns true/false if the event is prevented or not.\n\t\t *\n\t\t * @private\n\t\t * @param {Event} e Event object.\n\t\t * @return {Boolean} true/false if the event is prevented or not.\n\t\t */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/plugins/tinymce/tinymce.js#L31914-L31916", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "setWrapToString", "code": "function setWrapToString(wrapper, reference, bitmask) {\n var source = (reference + '');\n return setToString(wrapper, insertWrapDetails(source, updateWrapDetails(getWrapDetails(source), bitmask)));\n }", "docstring": "/**\n * Sets the `toString` method of `wrapper` to mimic the source of `reference`\n * with wrapper details in a comment at the top of the source body.\n *\n * @private\n * @param {Function} wrapper The function to modify.\n * @param {Function} reference The reference function.\n * @param {number} bitmask The bitmask flags. See `createWrap` for more details.\n * @returns {Function} Returns `wrapper`.\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/public/js/app.js#L21044-L21047", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "forEachRight", "code": "function forEachRight(collection, iteratee) {\n var func = isArray(collection) ? 
arrayEachRight : baseEachRight;\n return func(collection, getIteratee(iteratee, 3));\n }", "docstring": "/**\n * This method is like `_.forEach` except that it iterates over elements of\n * `collection` from right to left.\n *\n * @static\n * @memberOf _\n * @since 2.0.0\n * @alias eachRight\n * @category Collection\n * @param {Array|Object} collection The collection to iterate over.\n * @param {Function} [iteratee=_.identity] The function invoked per iteration.\n * @returns {Array|Object} Returns `collection`.\n * @see _.forEach\n * @example\n *\n * _.forEachRight([1, 2], function(value) {\n * console.log(value);\n * });\n * // => Logs `2` then `1`.\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/public/js/app.js#L23744-L23747", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EffectScope.off", "code": "off() {\n activeEffectScope = this.parent;\n }", "docstring": "/**\n * This should only be called on non-detached scopes\n * @internal\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/public/js/category.js#L26469-L26471", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "noop", "code": "function noop(a, b, c) { }", "docstring": "/* eslint-disable no-unused-vars */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/public/js/invoice.js#L23215-L23215", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "checkKeyCodes", "code": "function checkKeyCodes(eventKeyCode, key, builtInKeyCode, eventKeyName, builtInKeyName) {\n const mappedKeyCode = config.keyCodes[key] || builtInKeyCode;\n if (builtInKeyName && eventKeyName && !config.keyCodes[key]) {\n return isKeyNotMatch(builtInKeyName, eventKeyName);\n }\n else if (mappedKeyCode) {\n return isKeyNotMatch(mappedKeyCode, eventKeyCode);\n }\n else if (eventKeyName) {\n return hyphenate(eventKeyName) !== key;\n }\n return eventKeyCode === undefined;\n}", "docstring": "/**\n * Runtime helper for checking keyCodes from config.\n * exposed as Vue.prototype._k\n * passing in eventKeyName as last argument separately for backwards compat\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/public/js/product.js#L24888-L24900", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "queueActivatedComponent", "code": "function queueActivatedComponent(vm) {\n // setting _inactive to false here so that a render function can\n // rely on checking whether it's in an inactive tree (e.g. 
router-view)\n vm._inactive = false;\n activatedChildren.push(vm);\n}", "docstring": "/**\n * Queue a kept-alive component that was activated during patch.\n * The queue will be processed after the entire tree has been patched.\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/public/js/role.js#L26168-L26173", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "nextTick", "code": "function nextTick(cb, ctx) {\n let _resolve;\n callbacks.push(() => {\n if (cb) {\n try {\n cb.call(ctx);\n }\n catch (e) {\n handleError(e, ctx, 'nextTick');\n }\n }\n else if (_resolve) {\n _resolve(ctx);\n }\n });\n if (!pending) {\n pending = true;\n timerFunc();\n }\n // $flow-disable-line\n if (!cb && typeof Promise !== 'undefined') {\n return new Promise(resolve => {\n _resolve = resolve;\n });\n }\n}", "docstring": "/**\n * @internal\n */", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/public/js/user.js#L26742-L26767", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "hydrate", "code": "function hydrate(elm, vnode, insertedVnodeQueue, inVPre) {\n let i;\n const { tag, data, children } = vnode;\n inVPre = inVPre || (data && data.pre);\n vnode.elm = elm;\n if (isTrue(vnode.isComment) && isDef(vnode.asyncFactory)) {\n vnode.isAsyncPlaceholder = true;\n return true;\n }\n // assert node match\n {\n if (!assertNodeMatch(elm, vnode, inVPre)) {\n return false;\n }\n }\n if (isDef(data)) {\n if (isDef((i = data.hook)) && isDef((i = i.init)))\n i(vnode, true /* hydrating */);\n if (isDef((i = vnode.componentInstance))) {\n // child component. it should have hydrated its own tree.\n initComponent(vnode, insertedVnodeQueue);\n return true;\n }\n }\n if (isDef(tag)) {\n if (isDef(children)) {\n // empty element, allow client to pick up and populate children\n if (!elm.hasChildNodes()) {\n createChildren(vnode, children, insertedVnodeQueue);\n }\n else {\n // v-html and domProps: innerHTML\n if (isDef((i = data)) &&\n isDef((i = i.domProps)) &&\n isDef((i = i.innerHTML))) {\n if (i !== elm.innerHTML) {\n /* istanbul ignore if */\n if (typeof console !== 'undefined' &&\n !hydrationBailed) {\n hydrationBailed = true;\n console.warn('Parent: ', elm);\n console.warn('server innerHTML: ', i);\n console.warn('client innerHTML: ', elm.innerHTML);\n }\n return false;\n }\n }\n else {\n // iterate and compare children lists\n let childrenMatch = true;\n let childNode = elm.firstChild;\n for (let i = 0; i < children.length; i++) {\n if (!childNode ||\n !hydrate(childNode, children[i], insertedVnodeQueue, inVPre)) {\n childrenMatch = false;\n break;\n }\n childNode = childNode.nextSibling;\n }\n // if childNode is not null, it means the actual childNodes list is\n // longer than the virtual children list.\n if (!childrenMatch || childNode) {\n /* istanbul ignore if */\n if (typeof console !== 'undefined' &&\n !hydrationBailed) {\n hydrationBailed = true;\n console.warn('Parent: ', elm);\n console.warn('Mismatching childNodes vs. 
VNodes: ', elm.childNodes, children);\n }\n return false;\n }\n }\n }\n }\n if (isDef(data)) {\n let fullInvoke = false;\n for (const key in data) {\n if (!isRenderedModule(key)) {\n fullInvoke = true;\n invokeCreateHooks(vnode, insertedVnodeQueue);\n break;\n }\n }\n if (!fullInvoke && data['class']) {\n // ensure collecting deps for deep class bindings for future updates\n traverse(data['class']);\n }\n }\n }\n else if (elm.data !== vnode.text) {\n elm.data = vnode.text;\n }\n return true;\n }", "docstring": "// Note: this is a browser-only function so we can assume elms are DOM nodes.", "url": "https://github.com/carmonabernaldiego/inventory/blob/f0fa12c3e312cab38147ff9c204945667f46da75/public/js/user.js#L29871-L29964", "sha": "f0fa12c3e312cab38147ff9c204945667f46da75"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MRHand.constructor", "code": "constructor(handedness, scene) {\n this.handedness = handedness;\n this.pinch = false;\n\n this.lastPosition = new THREE.Vector3();\n this.active = false;\n\n this.jointPhysicsBodies = {};\n this.pointer = new THREE.Object3D();\n\n this.identityPosition = new THREE.Vector3();\n\n this.tempJointPosition = new THREE.Vector3();\n this.tempJointOrientation = new THREE.Quaternion();\n\n this.controllerModelFactory = new XRControllerModelFactory();\n this.handModelFactory = new XRHandModelFactory();\n\n this.mesh;\n this.controller = mrjsUtils.xr.getController(HAND_MAPPING[handedness]);\n this.controller.add(this.pointer);\n this.pointer.position.setZ(-0.5);\n\n this.grip = mrjsUtils.xr.getControllerGrip(HAND_MAPPING[handedness]);\n this.grip.add(this.controllerModelFactory.createControllerModel(this.grip));\n\n this.hand = mrjsUtils.xr.getHand(HAND_MAPPING[handedness]);\n this.model = this.handModelFactory.createHandModel(this.hand, 'mesh');\n\n this.hand.add(this.model);\n\n this.hand.addEventListener('selectstart', this.onSelect);\n this.hand.addEventListener('selectend', this.onSelect);\n\n scene.add(this.controller);\n scene.add(this.grip);\n scene.add(this.hand);\n this.initPhysicsBodies();\n }", "docstring": "/**\n * @class\n * @description Constructor for the MRHand class object. Setups up all attributes for MRHand including physics, mouse/cursor information, hand tracking and state, and model\n * @param {object} handedness - enum for the `left`` or `right` hand.\n * @param {object} scene - the threejs scene object with information from the MRApp.\n */", "url": "https://github.com/Volumetrics-io/mrjs/blob/8b31dbfc30f9c12ba74dd59ca44bde6e7de32f79/src/core/user/MRHand.js#L67-L105", "sha": "8b31dbfc30f9c12ba74dd59ca44bde6e7de32f79"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "logStacks", "code": "func logStacks() {\n\ttime.Sleep(time.Second) // Hack to wait for goroutines to end\n\tbuf := make([]byte, 1<<20)\n\tn := runtime.Stack(buf, true)\n\tfmt.Fprintln(os.Stderr, string(buf[:n]))\n}", "docstring": "// logStacks prints the stacks of live goroutines. 
This functionality\n// is disabled by default but can be enabled to find background work that\n// is not obeying cancellation.", "url": "https://github.com/ServiceWeaver/weaver/blob/656901ccf43e1ffce053e83e573643697680c223/weavertest/init.go#L327-L332", "sha": "656901ccf43e1ffce053e83e573643697680c223"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NewColorableStderr", "code": "func NewColorableStderr() io.Writer {\n\treturn os.Stderr\n}", "docstring": "// NewColorableStderr returns new instance of Writer which handles escape sequence for stderr.", "url": "https://github.com/mchmarny/s3cme/blob/f540c5d245ff91d37e7b4ebcf558d00fa0ecab81/vendor/github.com/mattn/go-colorable/colorable_others.go#L28-L30", "sha": "f540c5d245ff91d37e7b4ebcf558d00fa0ecab81"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Pause", "code": "func Pause() (err error) {\n\t_, _, e1 := Syscall(SYS_PAUSE, 0, 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/mchmarny/s3cme/blob/f540c5d245ff91d37e7b4ebcf558d00fa0ecab81/vendor/golang.org/x/sys/unix/zsyscall_linux_386.go#L393-L399", "sha": "f540c5d245ff91d37e7b4ebcf558d00fa0ecab81"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pwrite", "code": "func pwrite(fd int, p []byte, offset int64) (n int, err error) {\n\tvar _p0 *byte\n\tif len(p) > 0 {\n\t\t_p0 = &p[0]\n\t}\n\tvar _p1 int\n\t_p1 = len(p)\n\tr0, er := C.pwrite64(C.int(fd), C.uintptr_t(uintptr(unsafe.Pointer(_p0))), C.size_t(_p1), C.longlong(offset))\n\tn = int(r0)\n\tif r0 == -1 && er != nil {\n\t\terr = er\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/mchmarny/s3cme/blob/f540c5d245ff91d37e7b4ebcf558d00fa0ecab81/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc.go#L981-L994", "sha": "f540c5d245ff91d37e7b4ebcf558d00fa0ecab81"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "callfstatat", "code": "func callfstatat(dirfd int, _p0 uintptr, stat uintptr, flags int) (r1 uintptr, e1 Errno) {\n\tr1, _, e1 = syscall6(uintptr(unsafe.Pointer(&libc_fstatat)), 4, uintptr(dirfd), _p0, stat, uintptr(flags), 0, 0)\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/mchmarny/s3cme/blob/f540c5d245ff91d37e7b4ebcf558d00fa0ecab81/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gc.go#L814-L817", "sha": "f540c5d245ff91d37e7b4ebcf558d00fa0ecab81"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "calltruncate", "code": "func calltruncate(_p0 uintptr, length int64) (r1 uintptr, e1 Errno) {\n\tr1, _, e1 = syscall6(uintptr(unsafe.Pointer(&libc_truncate)), 2, _p0, uintptr(length), 0, 0, 0, 0)\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/mchmarny/s3cme/blob/f540c5d245ff91d37e7b4ebcf558d00fa0ecab81/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gc.go#L961-L964", "sha": "f540c5d245ff91d37e7b4ebcf558d00fa0ecab81"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "callsetsid", "code": "func callsetsid() (r1 uintptr, e1 Errno) {\n\tr1 = uintptr(C.setsid())\n\te1 = syscall.GetErrno()\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": 
"https://github.com/Yakitrak/obsidian-cli/blob/0215c9cc6fdeb4d4a265bee4331837bdb9c7b7b7/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gccgo.go#L497-L501", "sha": "0215c9cc6fdeb4d4a265bee4331837bdb9c7b7b7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Ftruncate", "code": "func Ftruncate(fd int, length int64) (err error) {\n\t_, _, e1 := Syscall(SYS_FTRUNCATE, uintptr(fd), uintptr(length), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/Yakitrak/obsidian-cli/blob/0215c9cc6fdeb4d4a265bee4331837bdb9c7b7b7/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm64.go#L989-L995", "sha": "0215c9cc6fdeb4d4a265bee4331837bdb9c7b7b7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Flock", "code": "func Flock(fd int, how int) (err error) {\n\t_, _, e1 := syscall_syscall(libc_flock_trampoline_addr, uintptr(fd), uintptr(how), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "//go:cgo_import_dynamic libc_fchownat fchownat \"libc.so\"\n// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/Yakitrak/obsidian-cli/blob/0215c9cc6fdeb4d4a265bee4331837bdb9c7b7b7/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.go#L951-L957", "sha": "0215c9cc6fdeb4d4a265bee4331837bdb9c7b7b7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Mlock", "code": "func Mlock(b []byte) (err error) {\n\tvar _p0 unsafe.Pointer\n\tif len(b) > 0 {\n\t\t_p0 = unsafe.Pointer(&b[0])\n\t} else {\n\t\t_p0 = unsafe.Pointer(&_zero)\n\t}\n\t_, _, e1 := Syscall(SYS_MLOCK, uintptr(_p0), uintptr(len(b)), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/Yakitrak/obsidian-cli/blob/0215c9cc6fdeb4d4a265bee4331837bdb9c7b7b7/vendor/golang.org/x/sys/unix/zsyscall_netbsd_amd64.go#L269-L281", "sha": "0215c9cc6fdeb4d4a265bee4331837bdb9c7b7b7"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ShowTablesQuery", "code": "func (d *dbBaseOracle) ShowTablesQuery() string {\n\treturn \"SELECT TABLE_NAME FROM USER_TABLES\"\n}", "docstring": "// ShowTablesQuery show All the tables in database", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/github.com/beego/beego/v2/client/orm/db_oracle.go#L84-L86", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "lookupUnsafe", "code": "func (t *idnaTrie) lookupUnsafe(s []byte) uint16 {\n\tc0 := s[0]\n\tif c0 < 0x80 { // is ASCII\n\t\treturn idnaValues[c0]\n\t}\n\ti := idnaIndex[c0]\n\tif c0 < 0xE0 { // 2-byte UTF-8\n\t\treturn t.lookupValue(uint32(i), s[1])\n\t}\n\ti = idnaIndex[uint32(i)<<6+uint32(s[1])]\n\tif c0 < 0xF0 { // 3-byte UTF-8\n\t\treturn t.lookupValue(uint32(i), s[2])\n\t}\n\ti = idnaIndex[uint32(i)<<6+uint32(s[2])]\n\tif c0 < 0xF8 { // 4-byte UTF-8\n\t\treturn t.lookupValue(uint32(i), s[3])\n\t}\n\treturn 0\n}", "docstring": "// lookupUnsafe returns the trie value for the first UTF-8 encoding in s.\n// s must start with a full and valid UTF-8 encoded rune.", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/golang.org/x/net/idna/tables15.0.0.go#L602-L620", "sha": 
"690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "callnrecvmsg", "code": "func callnrecvmsg(s int, msg uintptr, flags int) (r1 uintptr, e1 Errno) {\n\tr1 = uintptr(C.nrecvmsg(C.int(s), C.uintptr_t(msg), C.int(flags)))\n\te1 = syscall.GetErrno()\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gccgo.go#L913-L917", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Getegid", "code": "func Getegid() (egid int) {\n\tr0, _, _ := RawSyscall(SYS_GETEGID, 0, 0, 0)\n\tegid = int(r0)\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/golang.org/x/sys/unix/zsyscall_dragonfly_amd64.go#L815-L819", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Fstatat", "code": "func Fstatat(dirfd int, path string, stat *Stat_t, flags int) (err error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(path)\n\tif err != nil {\n\t\treturn\n\t}\n\t_, _, e1 := Syscall6(SYS_FSTATAT64, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), uintptr(flags), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/golang.org/x/sys/unix/zsyscall_linux_arm.go#L253-L264", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sendto", "code": "func sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error) {\n\tvar _p0 unsafe.Pointer\n\tif len(buf) > 0 {\n\t\t_p0 = unsafe.Pointer(&buf[0])\n\t} else {\n\t\t_p0 = unsafe.Pointer(&_zero)\n\t}\n\t_, _, e1 := Syscall6(SYS_SENDTO, uintptr(s), uintptr(_p0), uintptr(len(buf)), uintptr(flags), uintptr(to), uintptr(addrlen))\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64.go#L481-L493", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SetLogger", "code": "func SetLogger(l Logger) {\n\tinternal.LoggerV2Impl = &internal.LoggerWrapper{Logger: l}\n}", "docstring": "// SetLogger sets the logger that is used in grpc. 
Call only from\n// init() functions.\n//\n// Deprecated: use SetLoggerV2.", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/google.golang.org/grpc/grpclog/logger.go#L32-L34", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sizeStringPtr", "code": "func sizeStringPtr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) {\n\tv := **p.StringPtr()\n\treturn f.tagsize + protowire.SizeBytes(len(v))\n}", "docstring": "// sizeStringPtr returns the size of wire encoding a *string pointer as a String.\n// It panics if the pointer is nil.", "url": "https://github.com/d3vilh/openvpn-ui/blob/690f84df426c13ad4742b61fd23e52fcdc489aa0/vendor/google.golang.org/protobuf/internal/impl/codec_gen.go#L5129-L5132", "sha": "690f84df426c13ad4742b61fd23e52fcdc489aa0"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "newPluginManager", "code": "func newPluginManager(conf *katalystconfig.Configuration) (pluginmanager.PluginManager, error) {\n\t// make sure plugin registration directory already exist\n\terr := os.MkdirAll(conf.PluginRegistrationDir, os.FileMode(0o755))\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"initializes plugin registration dir failed: %s\", err)\n\t}\n\n\treturn &customizedPluginManager{\n\t\tenabled: *atomic.NewBool(false),\n\t\tPluginManager: pluginmanager.NewPluginManager(\n\t\t\tconf.PluginRegistrationDir, /* sockDir */\n\t\t\t&record.FakeRecorder{},\n\t\t),\n\t}, nil\n}", "docstring": "// newPluginManager initializes the registration logic for extendable plugins.\n// all plugin manager added to generic context must use the same socket and\n// default checkpoint path, and if some plugin needs to use a different socket path,\n// it should create the plugin manager itself.", "url": "https://github.com/kubewharf/katalyst-core/blob/cdf8732c6e66119576e697193d4351dced0cddd4/cmd/katalyst-agent/app/agent/context.go#L146-L160", "sha": "cdf8732c6e66119576e697193d4351dced0cddd4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ResourceName", "code": "func (p *StaticPolicy) ResourceName() string {\n\treturn string(apiconsts.ResourceNetBandwidth)\n}", "docstring": "// ResourceName returns resource names managed by this plugin", "url": "https://github.com/kubewharf/katalyst-core/blob/cdf8732c6e66119576e697193d4351dced0cddd4/pkg/agent/qrm-plugins/network/staticpolicy/policy.go#L256-L258", "sha": "cdf8732c6e66119576e697193d4351dced0cddd4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "processNextWorkItem", "code": "func (cl *CNRLifecycle) processNextWorkItem() bool {\n\tkey, quit := cl.syncQueue.Get()\n\tif quit {\n\t\treturn false\n\t}\n\tdefer cl.syncQueue.Done(key)\n\n\terr := cl.sync(key.(string))\n\tif err == nil {\n\t\tcl.syncQueue.Forget(key)\n\t\treturn true\n\t}\n\n\tutilruntime.HandleError(fmt.Errorf(\"sync %q failed with %v\", key, err))\n\tcl.syncQueue.AddRateLimited(key)\n\n\treturn true\n}", "docstring": "// processNextWorkItem dequeues items, processes them, and marks them done.\n// It enforces that the sync is never invoked concurrently with the same key.", "url": "https://github.com/kubewharf/katalyst-core/blob/cdf8732c6e66119576e697193d4351dced0cddd4/pkg/controller/lifecycle/cnr.go#L213-L230", "sha": "cdf8732c6e66119576e697193d4351dced0cddd4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": 
"filterExpiredPodMetrics", "code": "func filterExpiredPodMetrics(metrics []apimetrics.PodMetrics) []apimetrics.PodMetrics {\n\tnow := time.Now()\n\treturn lo.Filter(metrics, func(item apimetrics.PodMetrics, _ int) bool {\n\t\treturn now.Before(item.Timestamp.Time)\n\t})\n}", "docstring": "// filterExpiredPodMetrics is used to filter out metrics before the current time from the metric slice.", "url": "https://github.com/kubewharf/katalyst-core/blob/cdf8732c6e66119576e697193d4351dced0cddd4/pkg/controller/spd/indicator-plugin/plugins/resource-portrait/util.go#L107-L112", "sha": "cdf8732c6e66119576e697193d4351dced0cddd4"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WithKeyMap", "code": "func (i *Input) WithKeyMap(k *KeyMap) Field {\n\ti.keymap = k.Input\n\ti.textinput.KeyMap.AcceptSuggestion = i.keymap.AcceptSuggestion\n\treturn i\n}", "docstring": "// WithKeyMap sets the keymap on an input field.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/github.com/charmbracelet/huh/field_input.go#L439-L443", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "literal", "code": "func literal(s string) string {\n\treturn regexp.QuoteMeta(s)\n}", "docstring": "// literal compiles s into a literal regular expression, escaping any regexp\n// reserved characters.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/github.com/containers/image/v5/docker/reference/regexp.go#L121-L123", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AddWith", "code": "func (w *Watcher) AddWith(name string, opts ...addOpt) error {\n\t_ = getOptions(opts...)\n\n\tw.mu.Lock()\n\tw.userWatches[name] = struct{}{}\n\tw.mu.Unlock()\n\t_, err := w.addWatch(name, noteAllEvents)\n\treturn err\n}", "docstring": "// AddWith is like [Watcher.Add], but allows adding options. When using Add()\n// the defaults described below are used.\n//\n// Possible options are:\n//\n// - [WithBufferSize] sets the buffer size for the Windows backend; no-op on\n// other platforms. 
The default is 64K (65536 bytes).", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/github.com/fsnotify/fsnotify/backend_kqueue.go#L314-L322", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "OverloadID", "code": "func (zero *evalZeroArity) OverloadID() string {\n\treturn zero.overload\n}", "docstring": "// OverloadID implements the InterpretableCall interface method.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/github.com/google/cel-go/interpreter/interpretable.go#L391-L393", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GetTotalMilestones", "code": "func (m *MilestoneStats) GetTotalMilestones() int {\n\tif m == nil || m.TotalMilestones == nil {\n\t\treturn 0\n\t}\n\treturn *m.TotalMilestones\n}", "docstring": "// GetTotalMilestones returns the TotalMilestones field if it's non-nil, zero value otherwise.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/github.com/google/go-github/v30/github/github-accessors.go#L6328-L6333", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "decompress1x_main_loop_bmi2", "code": "func decompress1x_main_loop_bmi2(ctx *decompress1xContext)", "docstring": "// decompress4x_main_loop_x86 is an x86 with BMI2 assembler implementation\n// of Decompress1X when tablelog > 8.\n//\n//go:noescape", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/github.com/klauspost/compress/huff0/decompress_amd64.go#L158-L158", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Hyperlink", "code": "func (o *Output) Hyperlink(link, name string) string {\n\treturn OSC + \"8;;\" + link + ST + name + OSC + \"8;;\" + ST\n}", "docstring": "// Hyperlink creates a hyperlink using OSC8.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/github.com/muesli/termenv/hyperlink.go#L9-L11", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BeTrueBecause", "code": "func BeTrueBecause(format string, args ...any) types.GomegaMatcher {\n\treturn &matchers.BeTrueMatcher{Reason: fmt.Sprintf(format, args...)}\n}", "docstring": "// BeTrueBecause succeeds if actual is true and displays the provided reason if it is false\n// fmt.Sprintf is used to render the reason", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/github.com/onsi/gomega/matchers.go#L71-L73", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UseConcurrentReads", "code": "func UseConcurrentReads(value bool) ClientOption {\n\treturn func(c *Client) error {\n\t\tc.disableConcurrentReads = !value\n\t\treturn nil\n\t}\n}", "docstring": "// UseConcurrentReads allows the Client to perform concurrent Reads.\n//\n// Concurrent reads are generally safe to use and not using them will degrade\n// performance, so this option is enabled by default.\n//\n// When enabled, WriteTo will use Stat/Fstat to get the file size and determines\n// how many concurrent workers to 
use.\n// Some \"read once\" servers will delete the file if they receive a stat call on an\n// open file and then the download will fail.\n// Disabling concurrent reads you will be able to download files from these servers.\n// If concurrent reads are disabled, the UseFstat option is ignored.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/github.com/pkg/sftp/client.go#L127-L132", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "newValueFunc", "code": "func newValueFunc(desc *Desc, valueType ValueType, function func() float64) *valueFunc {\n\tresult := &valueFunc{\n\t\tdesc: desc,\n\t\tvalType: valueType,\n\t\tfunction: function,\n\t\tlabelPairs: MakeLabelPairs(desc, nil),\n\t}\n\tresult.init(result)\n\treturn result\n}", "docstring": "// newValueFunc returns a newly allocated valueFunc with the given Desc and\n// ValueType. The value reported is determined by calling the given function\n// from within the Write method. Take into account that metric collection may\n// happen concurrently. If that results in concurrent calls to Write, like in\n// the case where a valueFunc is directly registered with Prometheus, the\n// provided function must be concurrency-safe.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/github.com/prometheus/client_golang/prometheus/value.go#L79-L88", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "servicePath", "code": "func (linux *systemDRecord) servicePath() string {\n\treturn \"/etc/systemd/system/\" + linux.name + \".service\"\n}", "docstring": "// Standard service path for systemD daemons", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/github.com/takama/daemon/daemon_linux_systemd.go#L24-L26", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WithInt64Callback", "code": "func WithInt64Callback(callback Int64Callback) Int64ObservableOption {\n\treturn int64CallbackOpt{callback}\n}", "docstring": "// WithInt64Callback adds callback to be called for an instrument.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/go.opentelemetry.io/otel/metric/asyncint64.go#L256-L258", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FaaSName", "code": "func FaaSName(val string) attribute.KeyValue {\n\treturn FaaSNameKey.String(val)\n}", "docstring": "// FaaSName returns an attribute KeyValue conforming to the \"faas.name\"\n// semantic conventions. It represents the name of the single function that\n// this runtime instance executes.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/go.opentelemetry.io/otel/semconv/v1.20.0/resource.go#L882-L884", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "EnduserRole", "code": "func EnduserRole(val string) attribute.KeyValue {\n\treturn EnduserRoleKey.String(val)\n}", "docstring": "// EnduserRole returns an attribute KeyValue conforming to the\n// \"enduser.role\" semantic conventions. 
It represents the actual/assumed role\n// the client is making the request under extracted from token or application\n// security context.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/go.opentelemetry.io/otel/semconv/v1.20.0/trace.go#L1302-L1304", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Verify", "code": "func Verify(mac *[16]byte, m []byte, key *[32]byte) bool {\n\tvar tmp [16]byte\n\tSum(&tmp, m, key)\n\treturn subtle.ConstantTimeCompare(tmp[:], mac[:]) == 1\n}", "docstring": "// Verify returns true if mac is a valid authenticator for m with the given key.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/golang.org/x/crypto/internal/poly1305/poly1305.go#L35-L39", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Setdomainname", "code": "func Setdomainname(p []byte) (err error) {\n\tvar _p0 *byte\n\tif len(p) > 0 {\n\t\t_p0 = &p[0]\n\t}\n\t_, e1 := callsetdomainname(uintptr(unsafe.Pointer(_p0)), len(p))\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64.go#L569-L579", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Dup2", "code": "func Dup2(from int, to int) (err error) {\n\t_, _, e1 := syscall_syscall(libc_dup2_trampoline_addr, uintptr(from), uintptr(to), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "//go:cgo_import_dynamic libc_dup dup \"libc.so\"\n// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.go#L799-L805", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Creat", "code": "func Creat(path string, mode uint32) (fd int, err error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(path)\n\tif err != nil {\n\t\treturn\n\t}\n\truntime.EnterSyscall()\n\tr0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS___CREAT_A<<4, uintptr(unsafe.Pointer(_p0)), uintptr(mode))\n\truntime.ExitSyscall()\n\tfd = int(r0)\n\tif int64(r0) == -1 {\n\t\terr = errnoErr2(e1, e2)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/golang.org/x/sys/unix/zsyscall_zos_s390x.go#L664-L678", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "callsetregid", "code": "func callsetregid(rgid int, egid int) (r1 uintptr, e1 Errno) {\n\tr1, _, e1 = rawSyscall6(uintptr(unsafe.Pointer(&libc_setregid)), 2, uintptr(rgid), uintptr(egid), 0, 0, 0, 0)\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gc.go#L919-L922", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": 
"", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "yaml_emitter_emit_scalar", "code": "func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool {\n\tif !yaml_emitter_select_scalar_style(emitter, event) {\n\t\treturn false\n\t}\n\tif !yaml_emitter_process_anchor(emitter) {\n\t\treturn false\n\t}\n\tif !yaml_emitter_process_tag(emitter) {\n\t\treturn false\n\t}\n\tif !yaml_emitter_increase_indent(emitter, true, false) {\n\t\treturn false\n\t}\n\tif !yaml_emitter_process_scalar(emitter) {\n\t\treturn false\n\t}\n\temitter.indent = emitter.indents[len(emitter.indents)-1]\n\temitter.indents = emitter.indents[:len(emitter.indents)-1]\n\temitter.state = emitter.states[len(emitter.states)-1]\n\temitter.states = emitter.states[:len(emitter.states)-1]\n\treturn true\n}", "docstring": "// Expect SCALAR.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/gopkg.in/yaml.v2/emitterc.go#L683-L704", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Convert_apiextensions_CustomResourceDefinitionCondition_To_v1_CustomResourceDefinitionCondition", "code": "func Convert_apiextensions_CustomResourceDefinitionCondition_To_v1_CustomResourceDefinitionCondition(in *apiextensions.CustomResourceDefinitionCondition, out *CustomResourceDefinitionCondition, s conversion.Scope) error {\n\treturn autoConvert_apiextensions_CustomResourceDefinitionCondition_To_v1_CustomResourceDefinitionCondition(in, out, s)\n}", "docstring": "// Convert_apiextensions_CustomResourceDefinitionCondition_To_v1_CustomResourceDefinitionCondition is an autogenerated conversion function.", "url": "https://github.com/loft-sh/devpod/blob/0ddf4f2cd7137dd6abaaecbab22943126a37a182/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/zz_generated.conversion.go#L377-L379", "sha": "0ddf4f2cd7137dd6abaaecbab22943126a37a182"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NewOptComputeSkipReason", "code": "func NewOptComputeSkipReason(v ComputeSkipReason) OptComputeSkipReason {\n\treturn OptComputeSkipReason{\n\t\tValue: v,\n\t\tSet: true,\n\t}\n}", "docstring": "// NewOptComputeSkipReason returns new OptComputeSkipReason with value set to v.", "url": "https://github.com/tonkeeper/opentonapi/blob/ff7ba60485b4637dc02d96ffa62e71d6ee69f947/pkg/oas/oas_schemas_gen.go#L11936-L11941", "sha": "ff7ba60485b4637dc02d96ffa62e71d6ee69f947"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RecordID", "code": "func RecordID(ctx context.Context, id int64) (DomainFeedback, error) {\n\te := DomainFeedback{ID: id}\n\terr := ReportsDB.Get(ctx, &e)\n\treturn e, err\n}", "docstring": "// RecordID returns the report for the ID.", "url": "https://github.com/mjl-/mox/blob/93b627ceab9b90cb3669d0adc5d6dc208181fc7a/dmarcdb/reports.go#L111-L115", "sha": "93b627ceab9b90cb3669d0adc5d6dc208181fc7a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "count", "code": "func (r *elemRef) count() int {\n\tif r.node != nil {\n\t\treturn len(r.node.inodes)\n\t}\n\treturn int(r.page.count)\n}", "docstring": "// count returns the number of inodes or page elements.", "url": "https://github.com/mjl-/mox/blob/93b627ceab9b90cb3669d0adc5d6dc208181fc7a/vendor/go.etcd.io/bbolt/cursor.go#L424-L429", "sha": "93b627ceab9b90cb3669d0adc5d6dc208181fc7a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", 
"func_name": "ValidHostHeader", "code": "func ValidHostHeader(h string) bool {\n\t// The latest spec is actually this:\n\t//\n\t// http://tools.ietf.org/html/rfc7230#section-5.4\n\t// Host = uri-host [ \":\" port ]\n\t//\n\t// Where uri-host is:\n\t// http://tools.ietf.org/html/rfc3986#section-3.2.2\n\t//\n\t// But we're going to be much more lenient for now and just\n\t// search for any byte that's not a valid byte in any of those\n\t// expressions.\n\tfor i := 0; i < len(h); i++ {\n\t\tif !validHostByte[h[i]] {\n\t\t\treturn false\n\t\t}\n\t}\n\treturn true\n}", "docstring": "// ValidHostHeader reports whether h is a valid host header.", "url": "https://github.com/mjl-/mox/blob/93b627ceab9b90cb3669d0adc5d6dc208181fc7a/vendor/golang.org/x/net/http/httpguts/httplex.go#L209-L227", "sha": "93b627ceab9b90cb3669d0adc5d6dc208181fc7a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "KeyctlGetKeyringID", "code": "func KeyctlGetKeyringID(id int, create bool) (ringid int, err error) {\n\tcreateInt := 0\n\tif create {\n\t\tcreateInt = 1\n\t}\n\treturn KeyctlInt(KEYCTL_GET_KEYRING_ID, id, createInt, 0, 0)\n}", "docstring": "// Keyctl commands with special signatures.\n// KeyctlGetKeyringID implements the KEYCTL_GET_KEYRING_ID command.\n// See the full documentation at:\n// http://man7.org/linux/man-pages/man3/keyctl_get_keyring_ID.3.html", "url": "https://github.com/mjl-/mox/blob/93b627ceab9b90cb3669d0adc5d6dc208181fc7a/vendor/golang.org/x/sys/unix/syscall_linux.go#L1458-L1464", "sha": "93b627ceab9b90cb3669d0adc5d6dc208181fc7a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Kill", "code": "func Kill(pid int, sig Signal) (err error) {\n\tr0, er := C.kill(C.int(pid), C.int(sig))\n\tif r0 == -1 && er != nil {\n\t\terr = er\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/mjl-/mox/blob/93b627ceab9b90cb3669d0adc5d6dc208181fc7a/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc.go#L467-L473", "sha": "93b627ceab9b90cb3669d0adc5d6dc208181fc7a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "read", "code": "func read(fd int, p []byte) (n int, err error) {\n\tvar _p0 *byte\n\tif len(p) > 0 {\n\t\t_p0 = &p[0]\n\t}\n\tr0, e1 := callread(fd, uintptr(unsafe.Pointer(_p0)), len(p))\n\tn = int(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/mjl-/mox/blob/93b627ceab9b90cb3669d0adc5d6dc208181fc7a/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64.go#L514-L525", "sha": "93b627ceab9b90cb3669d0adc5d6dc208181fc7a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "socketpair", "code": "func socketpair(domain int, typ int, proto int, fd *[2]int32) (err error) {\n\t_, _, e1 := RawSyscall6(SYS_SOCKETPAIR, uintptr(domain), uintptr(typ), uintptr(proto), uintptr(unsafe.Pointer(fd)), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/mjl-/mox/blob/93b627ceab9b90cb3669d0adc5d6dc208181fc7a/vendor/golang.org/x/sys/unix/zsyscall_linux_arm64.go#L435-L441", "sha": "93b627ceab9b90cb3669d0adc5d6dc208181fc7a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Mount", "code": "func Mount(fsType string, dir string, flags int, data unsafe.Pointer) (err 
error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(fsType)\n\tif err != nil {\n\t\treturn\n\t}\n\tvar _p1 *byte\n\t_p1, err = BytePtrFromString(dir)\n\tif err != nil {\n\t\treturn\n\t}\n\t_, _, e1 := syscall_syscall6(libc_mount_trampoline_addr, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(flags), uintptr(data), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "//go:cgo_import_dynamic libc_mknodat mknodat \"libc.so\"\n// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/mjl-/mox/blob/93b627ceab9b90cb3669d0adc5d6dc208181fc7a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.go#L1496-L1512", "sha": "93b627ceab9b90cb3669d0adc5d6dc208181fc7a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Getrtable", "code": "func Getrtable() (rtable int, err error) {\n\tr0, _, e1 := syscall_rawSyscall(libc_getrtable_trampoline_addr, 0, 0, 0)\n\trtable = int(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "//go:cgo_import_dynamic libc_getrlimit getrlimit \"libc.so\"\n// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/mjl-/mox/blob/93b627ceab9b90cb3669d0adc5d6dc208181fc7a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.go#L1171-L1178", "sha": "93b627ceab9b90cb3669d0adc5d6dc208181fc7a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isTitle", "code": "func isTitle(c *context) bool {\n\tct := c.caseType()\n\tif c.info&hasMappingMask == 0 || ct == cTitle {\n\t\treturn true\n\t}\n\tif c.info&exceptionBit == 0 {\n\t\tif ct == cLower {\n\t\t\tc.err = transform.ErrEndOfSpan\n\t\t\treturn false\n\t\t}\n\t\treturn true\n\t}\n\t// Get the exception data.\n\te := exceptions[c.info>>exceptionShift:]\n\tif nTitle := e[1] & lengthMask; nTitle != noChange {\n\t\tc.err = transform.ErrEndOfSpan\n\t\treturn false\n\t}\n\tnFirst := (e[1] >> lengthBits) & lengthMask\n\tif ct == cLower && nFirst != noChange {\n\t\tc.err = transform.ErrEndOfSpan\n\t\treturn false\n\t}\n\treturn true\n}", "docstring": "// isTitle reports whether the current rune is in title case.", "url": "https://github.com/mjl-/mox/blob/93b627ceab9b90cb3669d0adc5d6dc208181fc7a/vendor/golang.org/x/text/cases/context.go#L307-L331", "sha": "93b627ceab9b90cb3669d0adc5d6dc208181fc7a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "lookupValue", "code": "func (t *nfcTrie) lookupValue(n uint32, b byte) uint16 {\n\tswitch {\n\tcase n < 46:\n\t\treturn uint16(nfcValues[n<<6+uint32(b)])\n\tdefault:\n\t\tn -= 46\n\t\treturn uint16(nfcSparse.lookup(n, b))\n\t}\n}", "docstring": "// lookupValue determines the type of block n and looks up the value for b.", "url": "https://github.com/mjl-/mox/blob/93b627ceab9b90cb3669d0adc5d6dc208181fc7a/vendor/golang.org/x/text/unicode/norm/tables15.0.0.go#L2956-L2964", "sha": "93b627ceab9b90cb3669d0adc5d6dc208181fc7a"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "brokerMessageToV2Issue49ChatSubscribeMessage", "code": "func brokerMessageToV2Issue49ChatSubscribeMessage(bMsg extensions.BrokerMessage) (V2Issue49ChatSubscribeMessage, error) {\n\tvar msg V2Issue49ChatSubscribeMessage\n\n\t// Convert to string\n\tpayload := string(bMsg.Payload)\n\tmsg.Payload = payload // No need for type conversion to reference\n\n\t// TODO: run checks on msg type\n\n\treturn msg, nil\n}", "docstring": "// 
brokerMessageToV2Issue49ChatSubscribeMessage will fill a new V2Issue49ChatSubscribeMessage with data from generic broker message", "url": "https://github.com/lerenn/asyncapi-codegen/blob/e9a7410f9c1bdead862612402fbb5c465e077f3b/test/v2/issues/49/asyncapi.gen.go#L799-L809", "sha": "e9a7410f9c1bdead862612402fbb5c465e077f3b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CodePath", "code": "func (s *State) CodePath() string {\n\tname := MainGo\n\tif s.CellIsTest {\n\t\tname = MainTestGo\n\t}\n\treturn path.Join(s.TempDir, name)\n}", "docstring": "// CodePath is the path to where the code is going to be saved. Either `main.go` or `main_test.go` file.", "url": "https://github.com/janpfeifer/gonb/blob/f52815182d2d551ef2647d8f53b71ee6e4e106bf/internal/goexec/execcode.go#L163-L169", "sha": "f52815182d2d551ef2647d8f53b71ee6e4e106bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DeclareVariable", "code": "func DeclareVariable(decls *Declarations, name, value string) {\n\tdecls.Variables[name] = &Variable{\n\t\tCursor: NoCursor,\n\t\tCellLines: CellLines{},\n\t\tKey: name,\n\t\tName: name,\n\t\tValueDefinition: value,\n\t}\n}", "docstring": "// DeclareVariable creates a variable definition in `decls`.\n// `value` is copied verbatim, so any type of variable goes.", "url": "https://github.com/janpfeifer/gonb/blob/f52815182d2d551ef2647d8f53b71ee6e4e106bf/internal/goexec/wasm.go#L215-L223", "sha": "f52815182d2d551ef2647d8f53b71ee6e4e106bf"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExecE", "code": "func (wscb *WorkflowStepCreateBulk) ExecE(ctx context.Context, cbs ...func(ctx context.Context, mc ClientSet, created *WorkflowStep) error) error {\n\t_, err := wscb.SaveE(ctx, cbs...)\n\treturn err\n}", "docstring": "// ExecE calls the given function after executed the query,\n// which is always good for cascading create operations.", "url": "https://github.com/seal-io/walrus/blob/5aef1b7525ec717b197bdd1a2ea84ff10d10cf33/pkg/dao/model/workflowstep_create.go#L757-L760", "sha": "5aef1b7525ec717b197bdd1a2ea84ff10d10cf33"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Scan", "code": "func (wss *WorkflowStageSelect) Scan(ctx context.Context, v any) error {\n\tctx = setContextOp(ctx, wss.ctx, \"Select\")\n\tif err := wss.prepareQuery(ctx); err != nil {\n\t\treturn err\n\t}\n\treturn scanWithInterceptors[*WorkflowStageQuery, *WorkflowStageSelect](ctx, wss.WorkflowStageQuery, wss, wss.inters, v)\n}", "docstring": "// Scan applies the selector query and scans the result into the given value.", "url": "https://github.com/seal-io/walrus/blob/5aef1b7525ec717b197bdd1a2ea84ff10d10cf33/pkg/dao/model/workflowstage_query.go#L803-L809", "sha": "5aef1b7525ec717b197bdd1a2ea84ff10d10cf33"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NameLT", "code": "func NameLT(v string) predicate.ResourceDefinitionMatchingRule {\n\treturn predicate.ResourceDefinitionMatchingRule(sql.FieldLT(FieldName, v))\n}", "docstring": "// NameLT applies the LT predicate on the \"name\" field.", "url": "https://github.com/seal-io/walrus/blob/5aef1b7525ec717b197bdd1a2ea84ff10d10cf33/pkg/dao/model/resourcedefinitionmatchingrule/where.go#L311-L313", "sha": "5aef1b7525ec717b197bdd1a2ea84ff10d10cf33"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ProjectIDIn", "code": "func ProjectIDIn(vs ...object.ID) predicate.ResourceRun 
{\n\treturn predicate.ResourceRun(sql.FieldIn(FieldProjectID, vs...))\n}", "docstring": "// ProjectIDIn applies the In predicate on the \"project_id\" field.", "url": "https://github.com/seal-io/walrus/blob/5aef1b7525ec717b197bdd1a2ea84ff10d10cf33/pkg/dao/model/resourcerun/where.go#L217-L219", "sha": "5aef1b7525ec717b197bdd1a2ea84ff10d10cf33"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TemplateIDNotIn", "code": "func TemplateIDNotIn(vs ...object.ID) predicate.ResourceRun {\n\treturn predicate.ResourceRun(sql.FieldNotIn(FieldTemplateID, vs...))\n}", "docstring": "// TemplateIDNotIn applies the NotIn predicate on the \"template_id\" field.", "url": "https://github.com/seal-io/walrus/blob/5aef1b7525ec717b197bdd1a2ea84ff10d10cf33/pkg/dao/model/resourcerun/where.go#L562-L564", "sha": "5aef1b7525ec717b197bdd1a2ea84ff10d10cf33"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CreatedByNEQ", "code": "func CreatedByNEQ(v string) predicate.ResourceRun {\n\treturn predicate.ResourceRun(sql.FieldNEQ(FieldCreatedBy, v))\n}", "docstring": "// CreatedByNEQ applies the NEQ predicate on the \"created_by\" field.", "url": "https://github.com/seal-io/walrus/blob/5aef1b7525ec717b197bdd1a2ea84ff10d10cf33/pkg/dao/model/resourcerun/where.go#L1092-L1094", "sha": "5aef1b7525ec717b197bdd1a2ea84ff10d10cf33"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TypeContainsFold", "code": "func TypeContainsFold(v string) predicate.WorkflowStep {\n\treturn predicate.WorkflowStep(sql.FieldContainsFold(FieldType, v))\n}", "docstring": "// TypeContainsFold applies the ContainsFold predicate on the \"type\" field.", "url": "https://github.com/seal-io/walrus/blob/5aef1b7525ec717b197bdd1a2ea84ff10d10cf33/pkg/dao/model/workflowstep/where.go#L415-L417", "sha": "5aef1b7525ec717b197bdd1a2ea84ff10d10cf33"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ValidateWith", "code": "func (i Values) ValidateWith(schema *openapi3.Schema) error {\n\tif schema == nil {\n\t\treturn nil\n\t}\n\n\t// Check required and undefined.\n\tl := sets.StringKeySet(i)\n\tr := sets.NewString(schema.Required...)\n\ta := sets.StringKeySet(schema.Properties)\n\n\tif diff := r.Difference(l).UnsortedList(); len(diff) != 0 {\n\t\treturn fmt.Errorf(\"not found required values %v\", diff)\n\t}\n\n\tif diff := l.Difference(a).UnsortedList(); len(diff) != 0 {\n\t\treturn fmt.Errorf(\"found undefiend values %v\", diff)\n\t}\n\n\t// Validate.\n\tfor n, v := range i {\n\t\tif schema.Properties[n] == nil || schema.Properties[n].Value == nil {\n\t\t\tcontinue\n\t\t}\n\n\t\tvar (\n\t\t\ts = schema.Properties[n].Value\n\t\t\terrTypeFunc = func(name string, ok bool, err error, expectedType string, actualValue any) error {\n\t\t\t\tif !ok {\n\t\t\t\t\treturn fmt.Errorf(\"%s is not type %s, actual value: %v\", name, expectedType, actualValue)\n\t\t\t\t}\n\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"failed to convert %s to %s: %w\", name, expectedType, err)\n\t\t\t\t}\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tvalidateSchemaFunc = func(name string, val any) error {\n\t\t\t\terr := s.VisitJSON(val)\n\t\t\t\tif err != nil {\n\t\t\t\t\tvar e *openapi3.SchemaError\n\t\t\t\t\tif errors.As(err, &e) {\n\t\t\t\t\t\treturn errorx.Errorf(\"invalid %s: %v\", name, e.Reason)\n\t\t\t\t\t}\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\treturn nil\n\t\t\t}\n\t\t)\n\n\t\tvar (\n\t\t\tval any\n\t\t\tok bool\n\t\t\terr error\n\t\t)\n\n\t\tswitch 
{\n\t\tdefault:\n\t\t\t_, ok, err := GetAny[any](v)\n\t\t\tif !ok || err != nil {\n\t\t\t\treturn errTypeFunc(n, ok, err, s.Type, v)\n\t\t\t}\n\n\t\t\tcontinue\n\t\tcase s.Type == openapi3.TypeString:\n\t\t\tval, ok, err = GetString(v)\n\t\t\tif !ok || err != nil {\n\t\t\t\treturn errTypeFunc(n, ok, err, s.Type, v)\n\t\t\t}\n\t\tcase s.Type == openapi3.TypeBoolean:\n\t\t\tval, ok, err = GetBool(v)\n\t\t\tif !ok || err != nil {\n\t\t\t\treturn errTypeFunc(n, ok, err, s.Type, v)\n\t\t\t}\n\t\tcase s.Type == openapi3.TypeInteger:\n\t\t\tval, ok, err = GetInt(v)\n\t\t\tif !ok || err != nil {\n\t\t\t\treturn errTypeFunc(n, ok, err, s.Type, v)\n\t\t\t}\n\t\tcase s.Type == openapi3.TypeNumber:\n\t\t\tval, ok, err = GetNumber(v)\n\t\t\tif !ok || err != nil {\n\t\t\t\treturn errTypeFunc(n, ok, err, s.Type, v)\n\t\t\t}\n\t\tcase HasAnyTypes(s):\n\t\t\t// Skip validate for any type.\n\t\t\tcontinue\n\t\tcase s.Type == openapi3.TypeArray:\n\t\t\tval, ok, err = GetSlice[any](v)\n\t\t\tif !ok || err != nil {\n\t\t\t\treturn errTypeFunc(n, ok, err, s.Type, v)\n\t\t\t}\n\t\tcase s.Type == openapi3.TypeObject:\n\t\t\tval, ok, err = GetMap[any](v)\n\t\t\tif !ok || err != nil {\n\t\t\t\treturn errTypeFunc(n, ok, err, s.Type, v)\n\t\t\t}\n\t\t}\n\n\t\terr = validateSchemaFunc(n, val)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "docstring": "// ValidateWith validates the property value with the given schemas.", "url": "https://github.com/seal-io/walrus/blob/5aef1b7525ec717b197bdd1a2ea84ff10d10cf33/pkg/dao/types/property/field.go#L43-L148", "sha": "5aef1b7525ec717b197bdd1a2ea84ff10d10cf33"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NewBinaryLiteralFromUint", "code": "func NewBinaryLiteralFromUint(value uint64, byteSize int) BinaryLiteral {\n\tif byteSize != -1 && (byteSize < 1 || byteSize > 8) {\n\t\tpanic(\"Invalid byteSize\")\n\t}\n\tbuf := make([]byte, 8)\n\tbinary.BigEndian.PutUint64(buf, value)\n\tif byteSize == -1 {\n\t\tbuf = trimLeadingZeroBytes(buf)\n\t} else {\n\t\tbuf = buf[8-byteSize:]\n\t}\n\treturn buf\n}", "docstring": "// NewBinaryLiteralFromUint creates a new BinaryLiteral instance by the given uint value in BitEndian.\n// byteSize will be used as the length of the new BinaryLiteral, with leading bytes filled to zero.\n// If byteSize is -1, the leading zeros in new BinaryLiteral will be trimmed.", "url": "https://github.com/secretflow/scql/blob/8ed4e6acc212b9d29b90d5dcdc91216e71424a08/pkg/types/binary_literal.go#L58-L70", "sha": "8ed4e6acc212b9d29b90d5dcdc91216e71424a08"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ParseDatetimeFromNum", "code": "func ParseDatetimeFromNum(sc *stmtctx.StatementContext, num int64) (Time, error) {\n\treturn ParseTimeFromNum(sc, num, mysql.TypeDatetime, DefaultFsp)\n}", "docstring": "// ParseDatetimeFromNum is a helper function wrapping ParseTimeFromNum with datetime type and default fsp.", "url": "https://github.com/secretflow/scql/blob/8ed4e6acc212b9d29b90d5dcdc91216e71424a08/pkg/types/time.go#L1536-L1538", "sha": "8ed4e6acc212b9d29b90d5dcdc91216e71424a08"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MatchesPodSpec", "code": "func MatchesPodSpec(rules []types.ResourceRule) bool {\n\tfor _, r := range rules {\n\t\tgr := r.ToGVR().GroupResource()\n\t\tif defaultPodSpecResources[gr] {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "docstring": "// MatchesPodSpec returns true if any rule matches a Pod spec", "url": 
"https://github.com/undistro/marvin/blob/9cf36105c76fb8b8548d352c031cd47e11ea8aff/pkg/validator/podspec.go#L32-L40", "sha": "9cf36105c76fb8b8548d352c031cd47e11ea8aff"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WithVectorDimensions", "code": "func WithVectorDimensions(size int) Option {\n\treturn func(p *Store) {\n\t\tp.vectorDimensions = size\n\t}\n}", "docstring": "// WithVectorDimensions is an option for specifying the vector size.", "url": "https://github.com/tmc/langchaingo/blob/0672790bb23a2c7e546a4a7aeffc9bef5bbd8c0b/vectorstores/pgvector/options.go#L83-L87", "sha": "0672790bb23a2c7e546a4a7aeffc9bef5bbd8c0b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MappedBits", "code": "func MappedBits[T comparable](values []T, maps map[T]Bits, opts ...BuilderOpt[T]) Bits {\n\tbo := builderOpts[T]{}\n\tfor _, opt := range opts {\n\t\topt(&bo)\n\t}\n\n\tb := Bits(0)\n\tfor _, value := range values {\n\t\tfor _, t := range bo.transform {\n\t\t\tvalue = t(value)\n\t\t}\n\t\tif val, ok := maps[value]; ok {\n\t\t\tb |= val\n\t\t}\n\t}\n\treturn b\n}", "docstring": "// MappedBits builds a Bits map from a set of values (e.g. strings) that are mapped in the form\n// value --> corresponding Bits value\n// in the \"maps\" constructor argument", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/pkg/internal/helpers/maps/bits.go#L24-L40", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MarshalText", "code": "func MarshalText(w io.Writer, m Message) error { return defaultTextMarshaler.Marshal(w, m) }", "docstring": "// MarshalText writes the proto text format of m to w.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/test/integration/components/old_grpc/backend/vendor/github.com/golang/protobuf/proto/text_encode.go#L97-L97", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "xorInGeneric", "code": "func xorInGeneric(d *state, buf []byte) {\n\tn := len(buf) / 8\n\n\tfor i := 0; i < n; i++ {\n\t\ta := binary.LittleEndian.Uint64(buf)\n\t\td.a[i] ^= a\n\t\tbuf = buf[8:]\n\t}\n}", "docstring": "// xorInGeneric xors the bytes in buf into the state; it\n// makes no non-portable assumptions about memory layout\n// or alignment.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/test/integration/components/old_grpc/backend/vendor/golang.org/x/crypto/sha3/xor_generic.go#L12-L20", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Symlink", "code": "func Symlink(path string, link string) (err error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(path)\n\tif err != nil {\n\t\treturn\n\t}\n\tvar _p1 *byte\n\t_p1, err = BytePtrFromString(link)\n\tif err != nil {\n\t\treturn\n\t}\n\t_, _, e1 := Syscall(SYS_SYMLINK, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/test/integration/components/old_grpc/backend/vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm64.go#L1681-L1697", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": 
"", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Time", "code": "func Time(t *Time_t) (tt Time_t, err error) {\n\tr0, _, e1 := RawSyscall(SYS_TIME, uintptr(unsafe.Pointer(t)), 0, 0)\n\ttt = Time_t(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/test/integration/components/old_grpc/backend/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc.go#L565-L572", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "socket", "code": "func socket(domain int, typ int, proto int) (fd int, err error) {\n\tr0, _, e1 := RawSyscall(SYS_SOCKET, uintptr(domain), uintptr(typ), uintptr(proto))\n\tfd = int(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/test/integration/components/old_grpc/backend/vendor/golang.org/x/sys/unix/zsyscall_netbsd_386.go#L81-L88", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "munmap", "code": "func munmap(addr uintptr, length uintptr) (err error) {\n\t_, _, e1 := syscall_syscall(SYS_MUNMAP, uintptr(addr), uintptr(length), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/test/integration/components/old_grpc/backend/vendor/golang.org/x/sys/unix/zsyscall_zos_s390x.go#L250-L256", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Renameat", "code": "func Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(oldpath)\n\tif err != nil {\n\t\treturn\n\t}\n\tvar _p1 *byte\n\t_p1, err = BytePtrFromString(newpath)\n\tif err != nil {\n\t\treturn\n\t}\n\t_, _, e1 := Syscall6(SYS_RENAMEAT, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/test/integration/components/old_grpc/backend/vendor/golang.org/x/sys/unix/zsyscall_linux_s390x.go#L247-L263", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NewClientTLSFromCert", "code": "func NewClientTLSFromCert(cp *x509.CertPool, serverNameOverride string) TransportCredentials {\n\treturn NewTLS(&tls.Config{ServerName: serverNameOverride, RootCAs: cp})\n}", "docstring": "// NewClientTLSFromCert constructs TLS credentials from the provided root\n// certificate authority certificate(s) to validate server connections. If\n// certificates to establish the identity of the client need to be included in\n// the credentials (eg: for mTLS), use NewTLS instead, where a complete\n// tls.Config can be specified.\n// serverNameOverride is for testing only. 
If set to a non empty string,\n// it will override the virtual host name of authority (e.g. :authority header\n// field) in requests.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/test/integration/components/old_grpc/backend/vendor/google.golang.org/grpc/credentials/tls.go#L156-L158", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Close", "code": "func Close(fd int) (err error) {\n\t_, _, e1 := syscall_syscall(libc_close_trampoline_addr, uintptr(fd), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "//go:cgo_import_dynamic libc_clock_gettime clock_gettime \"libc.so\"\n// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/test/integration/components/old_grpc/worker/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.go#L721-L727", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Swap", "code": "func (l *HandlerList) Swap(name string, replace NamedHandler) bool {\n\tvar swapped bool\n\n\tfor i := 0; i < len(l.list); i++ {\n\t\tif l.list[i].Name == name {\n\t\t\tl.list[i] = replace\n\t\t\tswapped = true\n\t\t}\n\t}\n\n\treturn swapped\n}", "docstring": "// Swap will swap out all handlers matching the name passed in. The matched\n// handlers will be swapped in. True is returned if the handlers were swapped.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go#L234-L245", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "mapassign_faststr", "code": "func mapassign_faststr(t *rt.GoType, h unsafe.Pointer, s string) unsafe.Pointer", "docstring": "//go:linkname mapassign_faststr runtime.mapassign_faststr\n//goland:noinspection GoUnusedParameter", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go#L101-L101", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AddHeader", "code": "func (r Response) AddHeader(header string, value string) Response {\n\tr.Header().Add(header, value)\n\treturn r\n}", "docstring": "// AddHeader is a shortcut for .Header().Add(header,value)", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/github.com/emicklei/go-restful/v3/response.go#L71-L74", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Any", "code": "func (group *RouterGroup) Any(relativePath string, handlers ...HandlerFunc) IRoutes {\n\tfor _, method := range anyMethods {\n\t\tgroup.handle(method, relativePath, handlers)\n\t}\n\n\treturn group.returnObj()\n}", "docstring": "// Any registers a route that matches all the HTTP methods.\n// GET, POST, PUT, PATCH, HEAD, OPTIONS, DELETE, CONNECT, TRACE.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/github.com/gin-gonic/gin/routergroup.go#L147-L153", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Float64Value", 
"code": "func Float64Value(v *float64) float64 {\n\tif v != nil {\n\t\treturn *v\n\t}\n\treturn 0\n}", "docstring": "// Float64Value returns the value of the float64 pointer passed in or\n// 0 if the pointer is nil.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/github.com/go-openapi/swag/convert_types.go#L621-L626", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isTIGER128", "code": "func isTIGER128(fl FieldLevel) bool {\n\treturn tiger128Regex.MatchString(fl.Field().String())\n}", "docstring": "// isTIGER128 is the validation function for validating if the field's value is a valid TIGER128.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/github.com/go-playground/validator/v10/baked_in.go#L589-L591", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "appendMerge", "code": "func appendMerge(target, sub []int) []int {\n\tlt, ls := len(target), len(sub)\n\tout := make([]int, 0, lt+ls)\n\n\tfor x, y := 0, 0; x < lt || y < ls; {\n\t\tif x >= lt {\n\t\t\tout = append(out, sub[y:]...)\n\t\t\tbreak\n\t\t}\n\n\t\tif y >= ls {\n\t\t\tout = append(out, target[x:]...)\n\t\t\tbreak\n\t\t}\n\n\t\txValue := target[x]\n\t\tyValue := sub[y]\n\n\t\tswitch {\n\n\t\tcase xValue == yValue:\n\t\t\tout = append(out, xValue)\n\t\t\tx++\n\t\t\ty++\n\n\t\tcase xValue < yValue:\n\t\t\tout = append(out, xValue)\n\t\t\tx++\n\n\t\tcase yValue < xValue:\n\t\t\tout = append(out, yValue)\n\t\t\ty++\n\n\t\t}\n\t}\n\n\ttarget = append(target[:0], out...)\n\n\treturn target\n}", "docstring": "// appendMerge merges and sorts given already SORTED and UNIQUE segments.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/github.com/gobwas/glob/match/match.go#L33-L72", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "cleanupSurroundingIdentical", "code": "func cleanupSurroundingIdentical(groups []diffStats, eq func(i, j int) bool) []diffStats {\n\tvar ix, iy int // indexes into sequence x and y\n\tfor i, ds := range groups {\n\t\t// Handle equal group.\n\t\tif ds.NumDiff() == 0 {\n\t\t\tix += ds.NumIdentical\n\t\t\tiy += ds.NumIdentical\n\t\t\tcontinue\n\t\t}\n\n\t\t// Handle unequal group.\n\t\tnx := ds.NumIdentical + ds.NumRemoved + ds.NumModified\n\t\tny := ds.NumIdentical + ds.NumInserted + ds.NumModified\n\t\tvar numLeadingIdentical, numTrailingIdentical int\n\t\tfor j := 0; j < nx && j < ny && eq(ix+j, iy+j); j++ {\n\t\t\tnumLeadingIdentical++\n\t\t}\n\t\tfor j := 0; j < nx && j < ny && eq(ix+nx-1-j, iy+ny-1-j); j++ {\n\t\t\tnumTrailingIdentical++\n\t\t}\n\t\tif numIdentical := numLeadingIdentical + numTrailingIdentical; numIdentical > 0 {\n\t\t\tif numLeadingIdentical > 0 {\n\t\t\t\t// Remove leading identical span from this group and\n\t\t\t\t// insert it into the preceding group.\n\t\t\t\tif i-1 >= 0 {\n\t\t\t\t\tgroups[i-1].NumIdentical += numLeadingIdentical\n\t\t\t\t} else {\n\t\t\t\t\t// No preceding group exists, so prepend a new group,\n\t\t\t\t\t// but do so after we finish iterating over all groups.\n\t\t\t\t\tdefer func() {\n\t\t\t\t\t\tgroups = append([]diffStats{{Name: groups[0].Name, NumIdentical: numLeadingIdentical}}, groups...)\n\t\t\t\t\t}()\n\t\t\t\t}\n\t\t\t\t// Increment indexes since the preceding group would 
have handled this.\n\t\t\t\tix += numLeadingIdentical\n\t\t\t\tiy += numLeadingIdentical\n\t\t\t}\n\t\t\tif numTrailingIdentical > 0 {\n\t\t\t\t// Remove trailing identical span from this group and\n\t\t\t\t// insert it into the succeeding group.\n\t\t\t\tif i+1 < len(groups) {\n\t\t\t\t\tgroups[i+1].NumIdentical += numTrailingIdentical\n\t\t\t\t} else {\n\t\t\t\t\t// No succeeding group exists, so append a new group,\n\t\t\t\t\t// but do so after we finish iterating over all groups.\n\t\t\t\t\tdefer func() {\n\t\t\t\t\t\tgroups = append(groups, diffStats{Name: groups[len(groups)-1].Name, NumIdentical: numTrailingIdentical})\n\t\t\t\t\t}()\n\t\t\t\t}\n\t\t\t\t// Do not increment indexes since the succeeding group will handle this.\n\t\t\t}\n\n\t\t\t// Update this group since some identical elements were removed.\n\t\t\tnx -= numIdentical\n\t\t\tny -= numIdentical\n\t\t\tgroups[i] = diffStats{Name: ds.Name, NumRemoved: nx, NumInserted: ny}\n\t\t}\n\t\tix += nx\n\t\tiy += ny\n\t}\n\treturn groups\n}", "docstring": "// cleanupSurroundingIdentical scans through all unequal groups, and\n// moves any leading sequence of equal elements to the preceding equal group and\n// moves and trailing sequence of equal elements to the succeeding equal group.\n//\n// This is necessary since coalesceInterveningIdentical may coalesce edit groups\n// together such that leading/trailing spans of equal elements becomes possible.\n// Note that this can occur even with an optimal diffing algorithm.\n//\n// Example:\n//\n//\tInput: [\n//\t\t{NumIdentical: 61},\n//\t\t{NumIdentical: 1 , NumRemoved: 11, NumInserted: 2}, // assume 3 leading identical elements\n//\t\t{NumIdentical: 67},\n//\t\t{NumIdentical: 7, NumRemoved: 12, NumInserted: 3}, // assume 10 trailing identical elements\n//\t\t{NumIdentical: 54},\n//\t]\n//\tOutput: [\n//\t\t{NumIdentical: 64}, // incremented by 3\n//\t\t{NumRemoved: 9},\n//\t\t{NumIdentical: 67},\n//\t\t{NumRemoved: 9},\n//\t\t{NumIdentical: 64}, // incremented by 10\n//\t]", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/github.com/google/go-cmp/cmp/report_slices.go#L553-L614", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Setns", "code": "func Setns(ns NsHandle, nstype int) (err error) {\n\treturn ErrNotImplemented\n}", "docstring": "// Setns sets namespace using golang.org/x/sys/unix.Setns on Linux. It\n// is not implemented on other platforms.\n//\n// Deprecated: Use golang.org/x/sys/unix.Setns instead.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/github.com/vishvananda/netns/netns_others.go#L18-L20", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MessagingServicebusMessageEnqueuedTime", "code": "func MessagingServicebusMessageEnqueuedTime(val int) attribute.KeyValue {\n\treturn MessagingServicebusMessageEnqueuedTimeKey.Int(val)\n}", "docstring": "// MessagingServicebusMessageEnqueuedTime returns an attribute KeyValue\n// conforming to the \"messaging.servicebus.message.enqueued_time\" semantic\n// conventions. 
It represents the UTC epoch seconds at which the message has\n// been accepted and stored in the entity.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/go.opentelemetry.io/otel/semconv/v1.25.0/attribute_group.go#L5776-L5778", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IsValid", "code": "func (t TraceID) IsValid() bool {\n\treturn !bytes.Equal(t[:], nilTraceID[:])\n}", "docstring": "// IsValid checks whether the trace TraceID is valid. A valid trace ID does\n// not consist of zeros only.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/go.opentelemetry.io/otel/trace/trace.go#L43-L45", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "LowercaseLevelEncoder", "code": "func LowercaseLevelEncoder(l Level, enc PrimitiveArrayEncoder) {\n\tenc.AppendString(l.String())\n}", "docstring": "// LowercaseLevelEncoder serializes a Level to a lowercase string. For example,\n// InfoLevel is serialized to \"info\".", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/go.uber.org/zap/zapcore/encoder.go#L47-L49", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ToUnicode", "code": "func ToUnicode(s string) (string, error) {\n\treturn Punycode.process(s, false)\n}", "docstring": "// ToUnicode is a wrapper for Punycode.ToUnicode.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/golang.org/x/net/idna/idna10.0.0.go#L51-L53", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Setegid", "code": "func Setegid(egid int) (err error) {\n\t_, _, e1 := RawSyscall(SYS_SETEGID, uintptr(egid), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm64.go#L1542-L1548", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Seek", "code": "func Seek(fd int, offset int64, whence int) (newoffset int64, err error) {\n\tr0, _, e1 := Syscall(SYS_LSEEK, uintptr(fd), uintptr(offset), uintptr(whence))\n\tnewoffset = int64(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/golang.org/x/sys/unix/zsyscall_freebsd_riscv64.go#L1520-L1527", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sendto", "code": "func sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error) {\n\tvar _p0 unsafe.Pointer\n\tif len(buf) > 0 {\n\t\t_p0 = unsafe.Pointer(&buf[0])\n\t} else {\n\t\t_p0 = unsafe.Pointer(&_zero)\n\t}\n\t_, _, e1 := Syscall6(SYS_SENDTO, uintptr(s), uintptr(_p0), uintptr(len(buf)), uintptr(flags), uintptr(to), uintptr(addrlen))\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS 
GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/golang.org/x/sys/unix/zsyscall_freebsd_amd64.go#L168-L180", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ForceCodec", "code": "func ForceCodec(codec encoding.Codec) CallOption {\n\treturn ForceCodecCallOption{Codec: codec}\n}", "docstring": "// ForceCodec returns a CallOption that will set codec to be used for all\n// request and response messages for a call. The result of calling Name() will\n// be used as the content-subtype after converting to lowercase, unless\n// CallContentSubtype is also used.\n//\n// See Content-Type on\n// https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests for\n// more details. Also see the documentation on RegisterCodec and\n// CallContentSubtype for more details on the interaction between Codec and\n// content-subtype.\n//\n// This function is provided for advanced users; prefer to use only\n// CallContentSubtype to select a registered codec instead.\n//\n// # Experimental\n//\n// Notice: This API is EXPERIMENTAL and may be changed or removed in a\n// later release.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/google.golang.org/grpc/rpc_util.go#L498-L500", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IsValid", "code": "func (v Value) IsValid() bool {\n\treturn v.typ != nilType\n}", "docstring": "// IsValid reports whether v is populated with a value.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/google.golang.org/protobuf/reflect/protoreflect/value_union.go#L191-L193", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "addKnownTypes", "code": "func addKnownTypes(scheme *runtime.Scheme) error {\n\tscheme.AddKnownTypes(SchemeGroupVersion,\n\t\t&Ingress{},\n\t\t&IngressList{},\n\t\t&IngressClass{},\n\t\t&IngressClassList{},\n\t)\n\t// Add the watch version that applies\n\tmetav1.AddToGroupVersion(scheme, SchemeGroupVersion)\n\treturn nil\n}", "docstring": "// Adds the list of known types to the given scheme.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/k8s.io/api/networking/v1beta1/register.go#L48-L58", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "diffMaps", "code": "func diffMaps(original, modified map[string]interface{}, schema LookupPatchMeta, diffOptions DiffOptions) (map[string]interface{}, error) {\n\tpatch := map[string]interface{}{}\n\n\t// This will be used to build the $retainKeys directive sent in the patch\n\tretainKeysList := make([]interface{}, 0, len(modified))\n\n\t// Compare each value in the modified map against the value in the original map\n\tfor key, modifiedValue := range modified {\n\t\t// Get the underlying type for pointers\n\t\tif diffOptions.BuildRetainKeysDirective && modifiedValue != nil {\n\t\t\tretainKeysList = append(retainKeysList, key)\n\t\t}\n\n\t\toriginalValue, ok := original[key]\n\t\tif !ok {\n\t\t\t// Key was added, so add to patch\n\t\t\tif !diffOptions.IgnoreChangesAndAdditions {\n\t\t\t\tpatch[key] = modifiedValue\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\t// The 
patch may have a patch directive\n\t\t// TODO: figure out if we need this. This shouldn't be needed by apply. When would the original map have patch directives in it?\n\t\tfoundDirectiveMarker, err := handleDirectiveMarker(key, originalValue, modifiedValue, patch)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif foundDirectiveMarker {\n\t\t\tcontinue\n\t\t}\n\n\t\tif reflect.TypeOf(originalValue) != reflect.TypeOf(modifiedValue) {\n\t\t\t// Types have changed, so add to patch\n\t\t\tif !diffOptions.IgnoreChangesAndAdditions {\n\t\t\t\tpatch[key] = modifiedValue\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\t// Types are the same, so compare values\n\t\tswitch originalValueTyped := originalValue.(type) {\n\t\tcase map[string]interface{}:\n\t\t\tmodifiedValueTyped := modifiedValue.(map[string]interface{})\n\t\t\terr = handleMapDiff(key, originalValueTyped, modifiedValueTyped, patch, schema, diffOptions)\n\t\tcase []interface{}:\n\t\t\tmodifiedValueTyped := modifiedValue.([]interface{})\n\t\t\terr = handleSliceDiff(key, originalValueTyped, modifiedValueTyped, patch, schema, diffOptions)\n\t\tdefault:\n\t\t\treplacePatchFieldIfNotEqual(key, originalValue, modifiedValue, patch, diffOptions)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tupdatePatchIfMissing(original, modified, patch, diffOptions)\n\t// Insert the retainKeysList iff there are values present in the retainKeysList and\n\t// either of the following is true:\n\t// - the patch is not empty\n\t// - there are additional field in original that need to be cleared\n\tif len(retainKeysList) > 0 &&\n\t\t(len(patch) > 0 || hasAdditionalNewField(original, modified)) {\n\t\tpatch[retainKeysDirective] = sortScalars(retainKeysList)\n\t}\n\treturn patch, nil\n}", "docstring": "// Returns a (recursive) strategic merge patch that yields modified when applied to original.\n// Including:\n// - Adding fields to the patch present in modified, missing from original\n// - Setting fields to the patch present in modified and original with different values\n// - Delete fields present in original, missing from modified through\n// - IFF map field - set to nil in patch\n// - IFF list of maps && merge strategy - use deleteDirective for the elements\n// - IFF list of primitives && merge strategy - use parallel deletion list\n// - IFF list of maps or primitives with replace strategy (default) - set patch value to the value in modified\n// - Build $retainKeys directive for fields with retainKeys patch strategy", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/k8s.io/apimachinery/pkg/util/strategicpatch/patch.go#L168-L234", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExtractReplicaSet", "code": "func ExtractReplicaSet(replicaSet *extensionsv1beta1.ReplicaSet, fieldManager string) (*ReplicaSetApplyConfiguration, error) {\n\treturn extractReplicaSet(replicaSet, fieldManager, \"\")\n}", "docstring": "// ExtractReplicaSet extracts the applied configuration owned by fieldManager from\n// replicaSet. If no managedFields are found in replicaSet for fieldManager, a\n// ReplicaSetApplyConfiguration is returned with only the Name, Namespace (if applicable),\n// APIVersion and Kind populated. 
It is possible that no managed fields were found for because other\n// field managers have taken ownership of all the fields previously owned by fieldManager, or because\n// the fieldManager never owned fields any fields.\n// replicaSet must be a unmodified ReplicaSet API object that was retrieved from the Kubernetes API.\n// ExtractReplicaSet provides a way to perform a extract/modify-in-place/apply workflow.\n// Note that an extracted apply configuration will contain fewer fields than what the fieldManager previously\n// applied if another fieldManager has updated or force applied any of the previously applied fields.\n// Experimental!", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/k8s.io/client-go/applyconfigurations/extensions/v1beta1/replicaset.go#L61-L63", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IsValidURI", "code": "func (r *Ref) IsValidURI(basepaths ...string) bool {\n\tif r.String() == \"\" {\n\t\treturn true\n\t}\n\n\tv := r.RemoteURI()\n\tif v == \"\" {\n\t\treturn true\n\t}\n\n\tif r.HasFullURL {\n\t\trr, err := http.Get(v)\n\t\tif err != nil {\n\t\t\treturn false\n\t\t}\n\n\t\treturn rr.StatusCode/100 == 2\n\t}\n\n\tif !(r.HasFileScheme || r.HasFullFilePath || r.HasURLPathOnly) {\n\t\treturn false\n\t}\n\n\t// check for local file\n\tpth := v\n\tif r.HasURLPathOnly {\n\t\tbase := \".\"\n\t\tif len(basepaths) > 0 {\n\t\t\tbase = filepath.Dir(filepath.Join(basepaths...))\n\t\t}\n\t\tp, e := filepath.Abs(filepath.ToSlash(filepath.Join(base, pth)))\n\t\tif e != nil {\n\t\t\treturn false\n\t\t}\n\t\tpth = p\n\t}\n\n\tfi, err := os.Stat(filepath.ToSlash(pth))\n\tif err != nil {\n\t\treturn false\n\t}\n\n\treturn !fi.IsDir()\n}", "docstring": "// IsValidURI returns true when the url the ref points to can be found", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/k8s.io/kube-openapi/pkg/validation/spec/ref.go#L60-L103", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MultiMutatingHandler", "code": "func MultiMutatingHandler(handlers ...Handler) Handler {\n\treturn multiMutating(handlers)\n}", "docstring": "// MultiMutatingHandler combines multiple mutating webhook handlers into a single\n// mutating webhook handler. Handlers are called in sequential order, and the first\n// `allowed: false`\tresponse may short-circuit the rest. 
Users must take care to\n// ensure patches are disjoint.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/sigs.k8s.io/controller-runtime/pkg/webhook/admission/multi.go#L67-L69", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "fileExists", "code": "func fileExists(filePath string) bool {\n\tif _, err := os.Stat(filePath); err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}", "docstring": "// fileExists reports whether the named file or directory exists.", "url": "https://github.com/grafana/beyla/blob/2f2517bd9e5824bcd315292e62102f361a216434/vendor/sigs.k8s.io/e2e-framework/klient/conf/config.go#L144-L152", "sha": "2f2517bd9e5824bcd315292e62102f361a216434"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Save", "code": "func (u userPrivateInfoDo) Save(values ...*model.UserPrivateInfo) error {\n\tif len(values) == 0 {\n\t\treturn nil\n\t}\n\treturn u.DO.Save(values)\n}", "docstring": "// Save : !!! underlying implementation is different with GORM\n// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)", "url": "https://github.com/diggerhq/digger/blob/aa90fe3a96c897be124388d4e47642a7b2aa765b/next/models_generated/user_private_info.gen.go#L265-L270", "sha": "aa90fe3a96c897be124388d4e47642a7b2aa765b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NewDBPathsFromData", "code": "func NewDBPathsFromData(paths []string, target_sizes []uint64) []*DBPath {\n\tdbpaths := make([]*DBPath, len(paths))\n\tfor i, path := range paths {\n\t\ttargetSize := target_sizes[i]\n\t\tdbpaths[i] = NewDBPath(path, targetSize)\n\t}\n\n\treturn dbpaths\n}", "docstring": "// NewDBPathsFromData creates a slice with allocated DBPath objects\n// from paths and target_sizes.", "url": "https://github.com/paypal/junodb/blob/9750751a5335111b7a536326df5925e78cad245d/third_party/patches/forked/tecbot/gorocksdb/dbpath.go#L33-L41", "sha": "9750751a5335111b7a536326df5925e78cad245d"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "isNewline", "code": "func isNewline(lg lineGroup) bool {\n\treturn len(lg.comment) == 0 && len(lg.lines) == 1 && strings.TrimSpace(lg.lines[0]) == \"\"\n}", "docstring": "// isNewline determines if lg is just an empty line.", "url": "https://github.com/google/keep-sorted/blob/8d641518728d604fd965ca442704d0356912e76b/keepsorted/block.go#L328-L330", "sha": "8d641518728d604fd965ca442704d0356912e76b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Validate", "code": "func (m *Fixed32NotIn) Validate() error {\n\treturn m.validate(false)\n}", "docstring": "// Validate checks the field values on Fixed32NotIn with the rules defined in\n// the proto definition for this message. 
If any rules are violated, the first\n// error encountered is returned, or nil if there are no violations.", "url": "https://github.com/ml444/gkit/blob/6ce63f4114065d74e1eecc6d3ab578c60d8a1681/cmd/protoc-gen-go-validate/tests/cases/numbers_validate.pb.go#L4160-L4162", "sha": "6ce63f4114065d74e1eecc6d3ab578c60d8a1681"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TimeValueMap", "code": "func TimeValueMap(src map[string]*time.Time) map[string]time.Time {\n\tdst := make(map[string]time.Time)\n\tfor k, val := range src {\n\t\tif val != nil {\n\t\t\tdst[k] = *val\n\t\t}\n\t}\n\treturn dst\n}", "docstring": "// TimeValueMap converts a string map of time.Time pointers into a string\n// map of time.Time values", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/github.com/go-openapi/swag/convert_types.go#L722-L730", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RegisterFieldEncoder", "code": "func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) {\n\tfieldEncoders[fmt.Sprintf(\"%s/%s\", typ, field)] = encoder\n}", "docstring": "// RegisterFieldEncoder registers a TypeEncoder for a struct field", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/github.com/json-iterator/go/reflect_extension.go#L234-L236", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Size", "code": "func (b *Buffer) Size() int {\n\tsize := len(b.Buf)\n\tfor _, buf := range b.bufs {\n\t\tsize += len(buf)\n\t}\n\treturn size\n}", "docstring": "// Size computes the size of a buffer by adding sizes of every chunk.", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/github.com/mailru/easyjson/buffer/pool.go#L161-L167", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "socket", "code": "func socket(domain int, typ int, proto int) (fd int, err error) {\n\tr0, er := C.socket(C.int(domain), C.int(typ), C.int(proto))\n\tfd = int(r0)\n\tif r0 == -1 && er != nil {\n\t\terr = er\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc.go#L1155-L1162", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Fadvise", "code": "func Fadvise(fd int, offset int64, length int64, advice int) (err error) {\n\t_, _, e1 := Syscall6(SYS_FADVISE64, uintptr(fd), uintptr(offset), uintptr(length), uintptr(advice), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64le.go#L65-L71", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Utime", "code": "func Utime(path string, buf *Utimbuf) (err error) {\n\tvar _p0 *byte\n\t_p0, err = 
BytePtrFromString(path)\n\tif err != nil {\n\t\treturn\n\t}\n\t_, _, e1 := Syscall(SYS_UTIME, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(buf)), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc.go#L575-L586", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IoctlGetPtmget", "code": "func IoctlGetPtmget(fd int, req uint) (*Ptmget, error) {\n\tvar value Ptmget\n\terr := ioctlPtr(fd, req, unsafe.Pointer(&value))\n\treturn &value, err\n}", "docstring": "//sys\tioctl(fd int, req uint, arg uintptr) (err error)\n//sys\tioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) = SYS_IOCTL\n//sys\tsysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS___SYSCTL", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/golang.org/x/sys/unix/syscall_netbsd.go#L184-L188", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "getgroups", "code": "func getgroups(ngid int, gid *_Gid_t) (n int, err error) {\n\tr0, _, e1 := syscall_rawSyscall(libc_getgroups_trampoline_addr, uintptr(ngid), uintptr(unsafe.Pointer(gid)), 0)\n\tn = int(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.go#L17-L24", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Fadvise", "code": "func Fadvise(fd int, offset int64, length int64, advice int) (err error) {\n\t_, _, e1 := Syscall9(SYS_POSIX_FADVISE, uintptr(fd), 0, uintptr(offset), uintptr(offset>>32), uintptr(length), uintptr(length>>32), uintptr(advice), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm.go#L828-L834", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "lookupValue", "code": "func (t *bidiTrie) lookupValue(n uint32, b byte) uint8 {\n\tswitch {\n\tdefault:\n\t\treturn uint8(bidiValues[n<<6+uint32(b)])\n\t}\n}", "docstring": "// lookupValue determines the type of block n and looks up the value for b.", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/golang.org/x/text/unicode/bidi/tables9.0.0.go#L194-L199", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Snapshot", "code": "func (e *Encoder) Snapshot() encoderState {\n\treturn e.encoderState\n}", "docstring": "// Snapshot returns the current snapshot for use in Reset.", "url": 
"https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/google.golang.org/protobuf/internal/encoding/text/encode.go#L260-L262", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "MustParse", "code": "func MustParse(str string) Quantity {\n\tq, err := ParseQuantity(str)\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"cannot parse '%v': %v\", str, err))\n\t}\n\treturn q\n}", "docstring": "// MustParse turns the given string into a quantity or panics; for tests\n// or other cases where you know the string is valid.", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/k8s.io/apimachinery/pkg/api/resource/quantity.go#L138-L144", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RollingUpdateDeployment", "code": "func RollingUpdateDeployment() *RollingUpdateDeploymentApplyConfiguration {\n\treturn &RollingUpdateDeploymentApplyConfiguration{}\n}", "docstring": "// RollingUpdateDeploymentApplyConfiguration constructs a declarative configuration of the RollingUpdateDeployment type for use with\n// apply.", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/k8s.io/client-go/applyconfigurations/extensions/v1beta1/rollingupdatedeployment.go#L34-L36", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IngressTLS", "code": "func IngressTLS() *IngressTLSApplyConfiguration {\n\treturn &IngressTLSApplyConfiguration{}\n}", "docstring": "// IngressTLSApplyConfiguration constructs a declarative configuration of the IngressTLS type for use with\n// apply.", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/k8s.io/client-go/applyconfigurations/networking/v1/ingresstls.go#L30-L32", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExtractResourceSliceStatus", "code": "func ExtractResourceSliceStatus(resourceSlice *resourcev1alpha3.ResourceSlice, fieldManager string) (*ResourceSliceApplyConfiguration, error) {\n\treturn extractResourceSlice(resourceSlice, fieldManager, \"status\")\n}", "docstring": "// ExtractResourceSliceStatus is the same as ExtractResourceSlice except\n// that it extracts the status subresource applied configuration.\n// Experimental!", "url": "https://github.com/kubernetes-sigs/cloud-provider-kind/blob/4b1de87e50fa9cab5d8ab80af7108cc233223620/vendor/k8s.io/client-go/applyconfigurations/resource/v1alpha3/resourceslice.go#L66-L68", "sha": "4b1de87e50fa9cab5d8ab80af7108cc233223620"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CreatedAtIn", "code": "func CreatedAtIn(vs ...time.Time) predicate.Integration {\n\treturn predicate.Integration(sql.FieldIn(FieldCreatedAt, vs...))\n}", "docstring": "// CreatedAtIn applies the In predicate on the \"created_at\" field.", "url": "https://github.com/chainloop-dev/chainloop/blob/a5f4b377b1676bbe7fb1deb146964d13f6caf9fb/app/controlplane/pkg/data/ent/integration/where.go#L375-L377", "sha": "a5f4b377b1676bbe7fb1deb146964d13f6caf9fb"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AnyToInt", "code": "func AnyToInt(i 
any) (int, error) {\n\tv, err := AnyToInt64(i)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\n\t// int is compatible with both 32-bit and 64-bit systems\n\t//if int64(int(v)) != v {\n\t//\treturn 0, ErrValOut\n\t//}\n\n\treturn int(v), nil\n}", "docstring": "// AnyToInt converts the given value to an int", "url": "https://github.com/jefferyjob/go-easy-utils/blob/5cc229e3a6de69f3f038a8d7468a990a250a45e2/anyUtil/any_to_int_x.go#L10-L22", "sha": "5cc229e3a6de69f3f038a8d7468a990a250a45e2"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ChallengeBlocks", "code": "func ChallengeBlocks(domain, keyAuth string) ([]byte, []byte, error) {\n\t// Compute the SHA-256 digest of the key authorization.\n\tzBytes := sha256.Sum256([]byte(keyAuth))\n\n\tvalue, err := asn1.Marshal(zBytes[:sha256.Size])\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\t// Add the keyAuth digest as the acmeValidation-v1 extension\n\t// (marked as critical such that it won't be used by non-ACME software).\n\t// Reference: https://tools.ietf.org/html/draft-ietf-acme-tls-alpn-05#section-3\n\textensions := []pkix.Extension{\n\t\t{\n\t\t\tId: idPeAcmeIdentifierV1,\n\t\t\tCritical: true,\n\t\t\tValue: value,\n\t\t},\n\t}\n\n\t// Generate a new RSA key for the certificates.\n\ttempPrivateKey, err := certcrypto.GeneratePrivateKey(certcrypto.RSA2048)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\trsaPrivateKey := tempPrivateKey.(*rsa.PrivateKey)\n\n\t// Generate the PEM certificate using the provided private key, domain, and extra extensions.\n\ttempCertPEM, err := certcrypto.GeneratePemCert(rsaPrivateKey, domain, extensions)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\t// Encode the private key into a PEM format. We'll need to use it to generate the x509 keypair.\n\trsaPrivatePEM := certcrypto.PEMEncode(rsaPrivateKey)\n\n\treturn tempCertPEM, rsaPrivatePEM, nil\n}", "docstring": "// ChallengeBlocks returns PEM blocks (certPEMBlock, keyPEMBlock) with the acmeValidation-v1 extension\n// and domain name for the `tls-alpn-01` challenge.", "url": "https://github.com/aalex954/evilginx2-TTPs/blob/6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b/vendor/github.com/go-acme/lego/v3/challenge/tlsalpn01/tls_alpn_challenge.go#L75-L113", "sha": "6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "callgetsystemcfg", "code": "func callgetsystemcfg(label int) (r1 uintptr, e1 Errno) {\n\tr1 = uintptr(C.getsystemcfg(C.int(label)))\n\te1 = syscall.GetErrno()\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/aalex954/evilginx2-TTPs/blob/6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gccgo.go#L1026-L1030", "sha": "6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExtattrListFile", "code": "func ExtattrListFile(file string, attrnamespace int, data uintptr, nbytes int) (ret int, err error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(file)\n\tif err != nil {\n\t\treturn\n\t}\n\tr0, _, e1 := Syscall6(SYS_EXTATTR_LIST_FILE, uintptr(unsafe.Pointer(_p0)), uintptr(attrnamespace), uintptr(data), uintptr(nbytes), 0, 0)\n\tret = int(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": 
"https://github.com/aalex954/evilginx2-TTPs/blob/6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm64.go#L704-L716", "sha": "6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Setrlimit", "code": "func Setrlimit(resource int, rlim *Rlimit) (err error) {\n\t_, e1 := callsetrlimit(resource, uintptr(unsafe.Pointer(rlim)))\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/aalex954/evilginx2-TTPs/blob/6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64.go#L1414-L1420", "sha": "6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExtattrGetFd", "code": "func ExtattrGetFd(fd int, attrnamespace int, attrname string, data uintptr, nbytes int) (ret int, err error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(attrname)\n\tif err != nil {\n\t\treturn\n\t}\n\tr0, _, e1 := Syscall6(SYS_EXTATTR_GET_FD, uintptr(fd), uintptr(attrnamespace), uintptr(unsafe.Pointer(_p0)), uintptr(data), uintptr(nbytes), 0)\n\tret = int(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/aalex954/evilginx2-TTPs/blob/6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b/vendor/golang.org/x/sys/unix/zsyscall_netbsd_386.go#L573-L585", "sha": "6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Access", "code": "func Access(path string, mode uint32) (err error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(path)\n\tif err != nil {\n\t\treturn\n\t}\n\t_, _, e1 := Syscall(SYS_ACCESS, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/aalex954/evilginx2-TTPs/blob/6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b/vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm.go#L425-L436", "sha": "6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Kill", "code": "func Kill(pid int, signum syscall.Signal) (err error) {\n\t_, _, e1 := Syscall(SYS_KILL, uintptr(pid), uintptr(signum), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/aalex954/evilginx2-TTPs/blob/6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm.go#L1173-L1179", "sha": "6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ExtattrListFd", "code": "func ExtattrListFd(fd int, attrnamespace int, data uintptr, nbytes int) (ret int, err error) {\n\tr0, _, e1 := Syscall6(SYS_EXTATTR_LIST_FD, uintptr(fd), uintptr(attrnamespace), uintptr(data), uintptr(nbytes), 0, 0)\n\tret = int(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/aalex954/evilginx2-TTPs/blob/6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b/vendor/golang.org/x/sys/unix/zsyscall_freebsd_amd64.go#L631-L638", "sha": 
"6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "callfchown", "code": "func callfchown(fd int, uid int, gid int) (r1 uintptr, e1 Errno) {\n\tr1, _, e1 = syscall6(uintptr(unsafe.Pointer(&libc_fchown)), 3, uintptr(fd), uintptr(uid), uintptr(gid), 0, 0, 0)\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/aalex954/evilginx2-TTPs/blob/6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b/vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gc.go#L797-L800", "sha": "6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "KeyctlString", "code": "func KeyctlString(cmd int, id int) (string, error) {\n\t// We must loop as the string data may change in between the syscalls.\n\t// We could allocate a large buffer here to reduce the chance that the\n\t// syscall needs to be called twice; however, this is unnecessary as\n\t// the performance loss is negligible.\n\tvar buffer []byte\n\tfor {\n\t\t// Try to fill the buffer with data\n\t\tlength, err := KeyctlBuffer(cmd, id, buffer, 0)\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\n\t\t// Check if the data was written\n\t\tif length <= len(buffer) {\n\t\t\t// Exclude the null terminator\n\t\t\treturn string(buffer[:length-1]), nil\n\t\t}\n\n\t\t// Make a bigger buffer if needed\n\t\tbuffer = make([]byte, length)\n\t}\n}", "docstring": "// Keyctl Commands (http://man7.org/linux/man-pages/man2/keyctl.2.html)\n// KeyctlInt calls keyctl commands in which each argument is an int.\n// These commands are KEYCTL_REVOKE, KEYCTL_CHOWN, KEYCTL_CLEAR, KEYCTL_LINK,\n// KEYCTL_UNLINK, KEYCTL_NEGATE, KEYCTL_SET_REQKEY_KEYRING, KEYCTL_SET_TIMEOUT,\n// KEYCTL_ASSUME_AUTHORITY, KEYCTL_SESSION_TO_PARENT, KEYCTL_REJECT,\n// KEYCTL_INVALIDATE, and KEYCTL_GET_PERSISTENT.\n//sys\tKeyctlInt(cmd int, arg2 int, arg3 int, arg4 int, arg5 int) (ret int, err error) = SYS_KEYCTL\n// KeyctlBuffer calls keyctl commands in which the third and fourth\n// arguments are a buffer and its length, respectively.\n// These commands are KEYCTL_UPDATE, KEYCTL_READ, and KEYCTL_INSTANTIATE.\n//sys\tKeyctlBuffer(cmd int, arg2 int, buf []byte, arg5 int) (ret int, err error) = SYS_KEYCTL\n// KeyctlString calls keyctl commands which return a string.\n// These commands are KEYCTL_DESCRIBE and KEYCTL_GET_SECURITY.", "url": "https://github.com/aalex954/evilginx2-TTPs/blob/6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b/vendor/golang.org/x/sys/unix/syscall_linux.go#L1201-L1223", "sha": "6813b69e5eed2c14da2a57c8627cfb2bffb2cd5b"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NewError", "code": "func NewError(reason string) *Error {\n\treturn &Error{reason: reason}\n}", "docstring": "// NewError returns a new error.", "url": "https://github.com/ublue-os/fleek/blob/2beec3661ab231c03034b817b3b4ee60125e3173/vendor/github.com/go-git/go-git/v5/plumbing/format/packfile/error.go#L11-L13", "sha": "2beec3661ab231c03034b817b3b4ee60125e3173"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Changed", "code": "func (f *FlagSet) Changed(name string) bool {\n\tflag := f.Lookup(name)\n\t// If a flag doesn't exist, it wasn't changed....\n\tif flag == nil {\n\t\treturn false\n\t}\n\treturn flag.Changed\n}", "docstring": "// Changed returns true if the flag was explicitly set during Parse() and false\n// otherwise", "url": 
"https://github.com/ublue-os/fleek/blob/2beec3661ab231c03034b817b3b4ee60125e3173/vendor/github.com/spf13/pflag/flag.go#L508-L515", "sha": "2beec3661ab231c03034b817b3b4ee60125e3173"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Fstat", "code": "func Fstat(fd int, stat *Stat_t) (err error) {\n\t_, _, e1 := Syscall(SYS_FSTAT, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/ublue-os/fleek/blob/2beec3661ab231c03034b817b3b4ee60125e3173/vendor/golang.org/x/sys/unix/zsyscall_linux_arm64.go#L85-L91", "sha": "2beec3661ab231c03034b817b3b4ee60125e3173"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pread", "code": "func pread(fd int, p []byte, offset int64) (n int, err error) {\n\tvar _p0 unsafe.Pointer\n\tif len(p) > 0 {\n\t\t_p0 = unsafe.Pointer(&p[0])\n\t} else {\n\t\t_p0 = unsafe.Pointer(&_zero)\n\t}\n\tr0, _, e1 := syscall_syscall6(libc_pread_trampoline_addr, uintptr(fd), uintptr(_p0), uintptr(len(p)), 0, uintptr(offset), uintptr(offset>>32))\n\tn = int(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "//go:cgo_import_dynamic libc_pathconf pathconf \"libc.so\"\n// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/ublue-os/fleek/blob/2beec3661ab231c03034b817b3b4ee60125e3173/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.go#L1570-L1583", "sha": "2beec3661ab231c03034b817b3b4ee60125e3173"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Access", "code": "func Access(path string, mode uint32) (err error) {\n\treturn Faccessat(AT_FDCWD, path, mode, 0)\n}", "docstring": "/*\n * Wrapped\n */", "url": "https://github.com/ublue-os/fleek/blob/2beec3661ab231c03034b817b3b4ee60125e3173/vendor/golang.org/x/sys/unix/syscall_aix.go#L22-L24", "sha": "2beec3661ab231c03034b817b3b4ee60125e3173"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Sync", "code": "func Sync() {\n\tSyscallNoError(SYS_SYNC, 0, 0, 0)\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/ublue-os/fleek/blob/2beec3661ab231c03034b817b3b4ee60125e3173/vendor/golang.org/x/sys/unix/zsyscall_linux.go#L1602-L1605", "sha": "2beec3661ab231c03034b817b3b4ee60125e3173"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FieldCleared", "code": "func (m *FeedbackSummarizationsReactionsMutation) FieldCleared(name string) bool {\n\t_, ok := m.clearedFields[name]\n\treturn ok\n}", "docstring": "// FieldCleared returns a boolean indicating if a field with the given name was\n// cleared in this mutation.", "url": "https://github.com/nekomeowww/insights-bot/blob/967fa028caf356b0a96eaf19d85dcaa2968bbd1f/ent/mutation.go#L3043-L3046", "sha": "967fa028caf356b0a96eaf19d85dcaa2968bbd1f"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Linkat", "code": "func Linkat(pathfd int, path string, linkfd int, link string, flags int) (err error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(path)\n\tif err != nil {\n\t\treturn\n\t}\n\tvar _p1 *byte\n\t_p1, err = BytePtrFromString(link)\n\tif err != nil {\n\t\treturn\n\t}\n\t_, _, e1 := Syscall6(SYS_LINKAT, uintptr(pathfd), uintptr(unsafe.Pointer(_p0)), uintptr(linkfd), uintptr(unsafe.Pointer(_p1)), 
uintptr(flags), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/jsclayton/prometheus-plex-exporter/blob/041ea971fadc6db467ef8f792b6f4f824cc3034e/vendor/golang.org/x/sys/unix/zsyscall_freebsd_386.go#L1230-L1246", "sha": "041ea971fadc6db467ef8f792b6f4f824cc3034e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "mknodat", "code": "func mknodat(fd int, path string, mode uint32, dev int) (err error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(path)\n\tif err != nil {\n\t\treturn\n\t}\n\t_, _, e1 := Syscall6(SYS_MKNODAT, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(dev), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/jsclayton/prometheus-plex-exporter/blob/041ea971fadc6db467ef8f792b6f4f824cc3034e/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm64.go#L1335-L1346", "sha": "041ea971fadc6db467ef8f792b6f4f824cc3034e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Tee", "code": "func Tee(rfd int, wfd int, len int, flags int) (n int64, err error) {\n\tr0, _, e1 := Syscall6(SYS_TEE, uintptr(rfd), uintptr(wfd), uintptr(len), uintptr(flags), 0, 0)\n\tn = int64(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/jsclayton/prometheus-plex-exporter/blob/041ea971fadc6db467ef8f792b6f4f824cc3034e/vendor/golang.org/x/sys/unix/zsyscall_linux_riscv64.go#L38-L45", "sha": "041ea971fadc6db467ef8f792b6f4f824cc3034e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Fclonefileat", "code": "func Fclonefileat(srcDirfd int, dstDirfd int, dst string, flags int) (err error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(dst)\n\tif err != nil {\n\t\treturn\n\t}\n\t_, _, e1 := syscall_syscall6(libc_fclonefileat_trampoline_addr, uintptr(srcDirfd), uintptr(dstDirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "//go:cgo_import_dynamic libc_fchownat fchownat \"/usr/lib/libSystem.B.dylib\"\n// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/jsclayton/prometheus-plex-exporter/blob/041ea971fadc6db467ef8f792b6f4f824cc3034e/vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.go#L1182-L1193", "sha": "041ea971fadc6db467ef8f792b6f4f824cc3034e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Setresgid", "code": "func Setresgid(rgid int, egid int, sgid int) (err error) {\n\t_, _, e1 := RawSyscall(SYS_SETRESGID, uintptr(rgid), uintptr(egid), uintptr(sgid))\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/jsclayton/prometheus-plex-exporter/blob/041ea971fadc6db467ef8f792b6f4f824cc3034e/vendor/golang.org/x/sys/unix/zsyscall_freebsd_amd64.go#L1695-L1701", "sha": "041ea971fadc6db467ef8f792b6f4f824cc3034e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RegisterExtension", "code": "func (r *Types) RegisterExtension(xt protoreflect.ExtensionType) error {\n\t// Under rare circumstances getting the descriptor might recursively\n\t// 
examine the registry, so fetch it before locking.\n\t//\n\t// A known case where this can happen: Fetching the TypeDescriptor for a\n\t// legacy ExtensionDesc can consult the global registry.\n\txd := xt.TypeDescriptor()\n\n\tif r == GlobalTypes {\n\t\tglobalMutex.Lock()\n\t\tdefer globalMutex.Unlock()\n\t}\n\n\tfield := xd.Number()\n\tmessage := xd.ContainingMessage().FullName()\n\tif prev := r.extensionsByMessage[message][field]; prev != nil {\n\t\terr := errors.New(\"extension number %d is already registered on message %v\", field, message)\n\t\terr = amendErrorWithCaller(err, prev, xt)\n\t\tif !(r == GlobalTypes && ignoreConflict(xd, err)) {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif err := r.register(\"extension\", xd, xt); err != nil {\n\t\treturn err\n\t}\n\tif r.extensionsByMessage == nil {\n\t\tr.extensionsByMessage = make(extensionsByMessage)\n\t}\n\tif r.extensionsByMessage[message] == nil {\n\t\tr.extensionsByMessage[message] = make(extensionsByNumber)\n\t}\n\tr.extensionsByMessage[message][field] = xt\n\tr.numExtensions++\n\treturn nil\n}", "docstring": "// RegisterExtension registers the provided extension type.\n//\n// If a naming conflict occurs, the type is not registered and an error is returned.", "url": "https://github.com/jsclayton/prometheus-plex-exporter/blob/041ea971fadc6db467ef8f792b6f4f824cc3034e/vendor/google.golang.org/protobuf/reflect/protoregistry/registry.go#L536-L571", "sha": "041ea971fadc6db467ef8f792b6f4f824cc3034e"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NextPosition", "code": "func (ev binlogEvent) NextPosition() uint32 {\n\treturn binary.LittleEndian.Uint32(ev.Bytes()[13 : 13+4])\n}", "docstring": "// NextPosition returns the nextPosition field from the header", "url": "https://github.com/wesql/wescale/blob/5336f06e5b14979f277b061d622ecf756f16f2c1/go/mysql/binlog_event_common.go#L113-L115", "sha": "5336f06e5b14979f277b061d622ecf756f16f2c1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ConvertStateToMySQLErrorCode", "code": "func ConvertStateToMySQLErrorCode(state vterrors.State) string {\n\ts := getStateToMySQLState(state)\n\treturn strconv.Itoa(s.num)\n}", "docstring": "// ConvertStateToMySQLErrorCode returns the MySQL error code for the given vterrors.State.\n// If the state is 0, an empty string is returned", "url": "https://github.com/wesql/wescale/blob/5336f06e5b14979f277b061d622ecf756f16f2c1/go/mysql/sql_error.go#L214-L217", "sha": "5336f06e5b14979f277b061d622ecf756f16f2c1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "checkTablesCount", "code": "func checkTablesCount(t *testing.T, tablet *cluster.Vttablet, showTableName string, expectCount int) {\n\tquery := fmt.Sprintf(`show tables like '%%%s%%';`, showTableName)\n\tqueryResult, err := tablet.VttabletProcess.QueryTablet(query, keyspaceName, true)\n\trequire.Nil(t, err)\n\tassert.Equal(t, expectCount, len(queryResult.Rows))\n}", "docstring": "// checkTablesCount checks the number of tables in the given tablet", "url": "https://github.com/wesql/wescale/blob/5336f06e5b14979f277b061d622ecf756f16f2c1/go/test/endtoend/versionupgrade/upgrade_test.go#L181-L186", "sha": "5336f06e5b14979f277b061d622ecf756f16f2c1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GetToTables", "code": "func (node *TruncateTable) GetToTables() TableNames {\n\treturn nil\n}", "docstring": "// GetToTables implements the DDLStatement interface", "url": 
"https://github.com/wesql/wescale/blob/5336f06e5b14979f277b061d622ecf756f16f2c1/go/vt/sqlparser/ast.go#L1571-L1573", "sha": "5336f06e5b14979f277b061d622ecf756f16f2c1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AndNot", "code": "func (bs Bitset) AndNot(b2 Bitset) Bitset {\n\tif len(b2) == 0 {\n\t\treturn bs\n\t}\n\n\tmerged := make([]byte, len(bs))\n\tm := 0\n\n\tfor m = 0; m < len(bs); m++ {\n\t\tif m < len(b2) {\n\t\t\tmerged[m] = bs[m] & ^b2[m]\n\t\t} else {\n\t\t\tmerged[m] = bs[m]\n\t\t}\n\t}\n\tfor ; m > 0; m-- {\n\t\tif merged[m-1] != 0 {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn toBitset(merged[:m])\n}", "docstring": "// AndNot returns the logical AND NOT of the two Bitsets as a new Bitset", "url": "https://github.com/wesql/wescale/blob/5336f06e5b14979f277b061d622ecf756f16f2c1/go/vt/vtgate/semantics/bitset/bitset.go#L100-L121", "sha": "5336f06e5b14979f277b061d622ecf756f16f2c1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GenerateInsertStatement", "code": "func GenerateInsertStatement(qr *rules.Rule, ifNotExist bool) (string, error) {\n\tinsertTemplate := getInsertSQLTemplate(ifNotExist)\n\tparsed := sqlparser.BuildParsedQuery(insertTemplate,\n\t\t\":name\",\n\t\t\":description\",\n\t\t\":priority\",\n\t\t\":status\",\n\t\t\":plans\",\n\t\t\":fully_qualified_table_names\",\n\t\t\":query_regex\",\n\t\t\":query_template\",\n\t\t\":request_ip_regex\",\n\t\t\":user_regex\",\n\t\t\":leading_comment_regex\",\n\t\t\":trailing_comment_regex\",\n\t\t\":bind_var_conds\",\n\t\t\":action\",\n\t\t\":action_args\",\n\t)\n\tbindVars, err := qr.ToBindVariable()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tbound, err := parsed.GenerateQuery(bindVars, nil)\n\treturn bound, err\n}", "docstring": "// GenerateInsertStatement returns the SQL statement to insert the rule into the database.", "url": "https://github.com/wesql/wescale/blob/5336f06e5b14979f277b061d622ecf756f16f2c1/go/vt/vttablet/customrule/util.go#L26-L51", "sha": "5336f06e5b14979f277b061d622ecf756f16f2c1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "buildSelectColumnList", "code": "func buildSelectColumnList(t *schema.Table) string {\n\tbuf := sqlparser.NewTrackedBuffer(nil)\n\tfor i, c := range t.MessageInfo.Fields {\n\t\t// Column names may have to be escaped.\n\t\tif i == 0 {\n\t\t\tbuf.Myprintf(\"%v\", sqlparser.NewIdentifierCI(c.Name))\n\t\t} else {\n\t\t\tbuf.Myprintf(\", %v\", sqlparser.NewIdentifierCI(c.Name))\n\t\t}\n\t}\n\treturn buf.String()\n}", "docstring": "// buildSelectColumnList is a convenience function that\n// builds a 'select' list for the user-defined columns.", "url": "https://github.com/wesql/wescale/blob/5336f06e5b14979f277b061d622ecf756f16f2c1/go/vt/vttablet/tabletserver/messager/message_manager.go#L340-L351", "sha": "5336f06e5b14979f277b061d622ecf756f16f2c1"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NewCollectedStatusLister", "code": "func NewCollectedStatusLister(indexer cache.Indexer) CollectedStatusLister {\n\treturn &collectedStatusLister{indexer: indexer}\n}", "docstring": "// NewCollectedStatusLister returns a new CollectedStatusLister.", "url": "https://github.com/kubewharf/kubeadmiral/blob/c1d6d2c17b09b129f84303e233a34514f0ace318/pkg/client/listers/core/v1alpha1/collectedstatus.go#L29-L31", "sha": "c1d6d2c17b09b129f84303e233a34514f0ace318"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Lstat", "code": "func 
Lstat(name string) (os.FileInfo, error) {\n\tdefer updateOSMetrics(osMetricLstat, name)()\n\treturn os.Lstat(name)\n}", "docstring": "// Lstat captures the time taken to call os.Lstat", "url": "https://github.com/AbelChe/evil_minio/blob/d337ea171f7833d286ae351ca68fd145d8b40e86/cmd/os-instrumented.go#L169-L172", "sha": "d337ea171f7833d286ae351ca68fd145d8b40e86"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ParseHTTP", "code": "func (sses3) ParseHTTP(h http.Header) error {\n\tif h.Get(xhttp.AmzServerSideEncryption) != xhttp.AmzEncryptionAES {\n\t\treturn ErrInvalidEncryptionMethod\n\t}\n\treturn nil\n}", "docstring": "// ParseHTTP parses the SSE-S3 related HTTP headers and checks\n// whether they contain valid values.", "url": "https://github.com/AbelChe/evil_minio/blob/d337ea171f7833d286ae351ca68fd145d8b40e86/internal/crypto/sse-s3.go#L55-L60", "sha": "d337ea171f7833d286ae351ca68fd145d8b40e86"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GetFilteringEnabled", "code": "func (o *ClientFindSubEntry) GetFilteringEnabled() bool {\n\tif o == nil || o.FilteringEnabled == nil {\n\t\tvar ret bool\n\t\treturn ret\n\t}\n\treturn *o.FilteringEnabled\n}", "docstring": "// GetFilteringEnabled returns the FilteringEnabled field value if set, zero value otherwise.", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/internal/adguard/gen/model_client_find_sub_entry.go#L152-L158", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IsCRC32", "code": "func IsCRC32(str string) bool {\n\treturn IsHash(str, \"crc32\")\n}", "docstring": "// IsCRC32 checks if a string is a CRC32 hash. 
Alias for `IsHash(str, \"crc32\")`", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/github.com/asaskevich/govalidator/validator.go#L778-L780", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "NewSignalChannel", "code": "func NewSignalChannel() (<-chan os.Signal, func()) {\n\tsignalC := make(chan os.Signal, 1)\n\tsignal.Notify(signalC, signals...)\n\treturn signalC, func() {\n\t\tsignal.Stop(signalC)\n\t\tclose(signalC)\n\t}\n}", "docstring": "// NewSignalChannel returns a new channel for interrupt signals.\n//\n// Call the returned function to cancel sending to this channel.", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/github.com/bufbuild/buf/private/pkg/interrupt/interrupt.go#L45-L52", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Set", "code": "func (p *PortOpt) Set(value string) error {\n\tlongSyntax, err := regexp.MatchString(`\\w+=\\w+(,\\w+=\\w+)*`, value)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif longSyntax {\n\t\tcsvReader := csv.NewReader(strings.NewReader(value))\n\t\tfields, err := csvReader.Read()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tpConfig := swarm.PortConfig{}\n\t\tfor _, field := range fields {\n\t\t\tparts := strings.SplitN(field, \"=\", 2)\n\t\t\tif len(parts) != 2 {\n\t\t\t\treturn fmt.Errorf(\"invalid field %s\", field)\n\t\t\t}\n\n\t\t\tkey := strings.ToLower(parts[0])\n\t\t\tvalue := strings.ToLower(parts[1])\n\n\t\t\tswitch key {\n\t\t\tcase portOptProtocol:\n\t\t\t\tif value != string(swarm.PortConfigProtocolTCP) && value != string(swarm.PortConfigProtocolUDP) && value != string(swarm.PortConfigProtocolSCTP) {\n\t\t\t\t\treturn fmt.Errorf(\"invalid protocol value %s\", value)\n\t\t\t\t}\n\n\t\t\t\tpConfig.Protocol = swarm.PortConfigProtocol(value)\n\t\t\tcase portOptMode:\n\t\t\t\tif value != string(swarm.PortConfigPublishModeIngress) && value != string(swarm.PortConfigPublishModeHost) {\n\t\t\t\t\treturn fmt.Errorf(\"invalid publish mode value %s\", value)\n\t\t\t\t}\n\n\t\t\t\tpConfig.PublishMode = swarm.PortConfigPublishMode(value)\n\t\t\tcase portOptTargetPort:\n\t\t\t\ttPort, err := strconv.ParseUint(value, 10, 16)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t\tpConfig.TargetPort = uint32(tPort)\n\t\t\tcase portOptPublishedPort:\n\t\t\t\tpPort, err := strconv.ParseUint(value, 10, 16)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t\tpConfig.PublishedPort = uint32(pPort)\n\t\t\tdefault:\n\t\t\t\treturn fmt.Errorf(\"invalid field key %s\", key)\n\t\t\t}\n\t\t}\n\n\t\tif pConfig.TargetPort == 0 {\n\t\t\treturn fmt.Errorf(\"missing mandatory field %q\", portOptTargetPort)\n\t\t}\n\n\t\tif pConfig.PublishMode == \"\" {\n\t\t\tpConfig.PublishMode = swarm.PortConfigPublishModeIngress\n\t\t}\n\n\t\tif pConfig.Protocol == \"\" {\n\t\t\tpConfig.Protocol = swarm.PortConfigProtocolTCP\n\t\t}\n\n\t\tp.ports = append(p.ports, pConfig)\n\t} else {\n\t\t// short syntax\n\t\tportConfigs := []swarm.PortConfig{}\n\t\tports, portBindingMap, err := nat.ParsePortSpecs([]string{value})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfor _, portBindings := range portBindingMap {\n\t\t\tfor _, portBinding := range portBindings {\n\t\t\t\tif portBinding.HostIP != \"\" {\n\t\t\t\t\treturn fmt.Errorf(\"hostip is not 
supported\")\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tfor port := range ports {\n\t\t\tportConfig, err := ConvertPortToPortConfig(port, portBindingMap)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tportConfigs = append(portConfigs, portConfig...)\n\t\t}\n\t\tp.ports = append(p.ports, portConfigs...)\n\t}\n\treturn nil\n}", "docstring": "// Set a new port value\n// nolint: gocyclo", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/github.com/docker/cli/opts/port.go#L29-L121", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IsDataLoss", "code": "func IsDataLoss(err error) bool {\n\t_, ok := getImplementer(err).(ErrDataLoss)\n\treturn ok\n}", "docstring": "// IsDataLoss returns if the passed in error is an ErrDataLoss", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/github.com/docker/docker/errdefs/is.go#L104-L107", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Marshal", "code": "func Marshal(v interface{}) ([]byte, error) {\n\treturn marshal(v, false)\n}", "docstring": "// Marshal returns the JSON encoding of v.\n//\n// Marshal traverses the value v recursively.\n// If an encountered value implements the Marshaler interface\n// and is not a nil pointer, Marshal calls its MarshalJSON method\n// to produce JSON. If no MarshalJSON method is present but the\n// value implements encoding.TextMarshaler instead, Marshal calls\n// its MarshalText method.\n// The nil pointer exception is not strictly necessary\n// but mimics a similar, necessary exception in the behavior of\n// UnmarshalJSON.\n//\n// Otherwise, Marshal uses the following type-dependent default encodings:\n//\n// Boolean values encode as JSON booleans.\n//\n// Floating point, integer, and Number values encode as JSON numbers.\n//\n// String values encode as JSON strings coerced to valid UTF-8,\n// replacing invalid bytes with the Unicode replacement rune.\n// The angle brackets \"<\" and \">\" are escaped to \"\\u003c\" and \"\\u003e\"\n// to keep some browsers from misinterpreting JSON output as HTML.\n// Ampersand \"&\" is also escaped to \"\\u0026\" for the same reason.\n//\n// Array and slice values encode as JSON arrays, except that\n// []byte encodes as a base64-encoded string, and a nil slice\n// encodes as the null JSON object.\n//\n// Struct values encode as JSON objects. Each exported struct field\n// becomes a member of the object unless\n// - the field's tag is \"-\", or\n// - the field is empty and its tag specifies the \"omitempty\" option.\n// The empty values are false, 0, any\n// nil pointer or interface value, and any array, slice, map, or string of\n// length zero. The object's default key string is the struct field name\n// but can be specified in the struct field's tag value. The \"json\" key in\n// the struct field's tag value is the key name, followed by an optional comma\n// and options. 
Examples:\n//\n// // Field is ignored by this package.\n// Field int `json:\"-\"`\n//\n// // Field appears in JSON as key \"myName\".\n// Field int `json:\"myName\"`\n//\n// // Field appears in JSON as key \"myName\" and\n// // the field is omitted from the object if its value is empty,\n// // as defined above.\n// Field int `json:\"myName,omitempty\"`\n//\n// // Field appears in JSON as key \"Field\" (the default), but\n// // the field is skipped if empty.\n// // Note the leading comma.\n// Field int `json:\",omitempty\"`\n//\n// The \"string\" option signals that a field is stored as JSON inside a\n// JSON-encoded string. It applies only to fields of string, floating point,\n// integer, or boolean types. This extra level of encoding is sometimes used\n// when communicating with JavaScript programs:\n//\n// Int64String int64 `json:\",string\"`\n//\n// The key name will be used if it's a non-empty string consisting of\n// only Unicode letters, digits, dollar signs, percent signs, hyphens,\n// underscores and slashes.\n//\n// Anonymous struct fields are usually marshaled as if their inner exported fields\n// were fields in the outer struct, subject to the usual Go visibility rules amended\n// as described in the next paragraph.\n// An anonymous struct field with a name given in its JSON tag is treated as\n// having that name, rather than being anonymous.\n// An anonymous struct field of interface type is treated the same as having\n// that type as its name, rather than being anonymous.\n//\n// The Go visibility rules for struct fields are amended for JSON when\n// deciding which field to marshal or unmarshal. If there are\n// multiple fields at the same level, and that level is the least\n// nested (and would therefore be the nesting level selected by the\n// usual Go rules), the following extra rules apply:\n//\n// 1) Of those fields, if any are JSON-tagged, only tagged fields are considered,\n// even if there are multiple untagged fields that would otherwise conflict.\n// 2) If there is exactly one field (tagged or not according to the first rule), that is selected.\n// 3) Otherwise there are multiple fields, and all are ignored; no error occurs.\n//\n// Handling of anonymous struct fields is new in Go 1.1.\n// Prior to Go 1.1, anonymous struct fields were ignored. To force ignoring of\n// an anonymous struct field in both current and earlier versions, give the field\n// a JSON tag of \"-\".\n//\n// Map values encode as JSON objects.\n// The map's key type must be string; the map keys are used as JSON object\n// keys, subject to the UTF-8 coercion described for string values above.\n//\n// Pointer values encode as the value pointed to.\n// A nil pointer encodes as the null JSON object.\n//\n// Interface values encode as the value contained in the interface.\n// A nil interface value encodes as the null JSON object.\n//\n// Channel, complex, and function values cannot be encoded in JSON.\n// Attempting to encode such a value causes Marshal to return\n// an UnsupportedTypeError.\n//\n// JSON cannot represent cyclic data structures and Marshal does not\n// handle them. 
Passing cyclic structures to Marshal will result in\n// an infinite recursion.\n//", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/github.com/docker/go/canonical/json/encode.go#L137-L139", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RandString", "code": "func (c Continue) RandString() string {\n\treturn randString(c.Rand)\n}", "docstring": "// RandString makes a random string up to 20 characters long. The returned string\n// may include a variety of (valid) UTF-8 encodings.", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/github.com/google/gofuzz/fuzz.go#L431-L433", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "FieldMaskFromRequestBody", "code": "func FieldMaskFromRequestBody(r io.Reader, msg proto.Message) (*field_mask.FieldMask, error) {\n\tfm := &field_mask.FieldMask{}\n\tvar root interface{}\n\n\tif err := json.NewDecoder(r).Decode(&root); err != nil {\n\t\tif err == io.EOF {\n\t\t\treturn fm, nil\n\t\t}\n\t\treturn nil, err\n\t}\n\n\tqueue := []fieldMaskPathItem{{node: root, msg: msg.ProtoReflect()}}\n\tfor len(queue) > 0 {\n\t\t// dequeue an item\n\t\titem := queue[0]\n\t\tqueue = queue[1:]\n\n\t\tm, ok := item.node.(map[string]interface{})\n\t\tswitch {\n\t\tcase ok:\n\t\t\t// if the item is an object, then enqueue all of its children\n\t\t\tfor k, v := range m {\n\t\t\t\tif item.msg == nil {\n\t\t\t\t\treturn nil, errors.New(\"JSON structure did not match request type\")\n\t\t\t\t}\n\n\t\t\t\tfd := getFieldByName(item.msg.Descriptor().Fields(), k)\n\t\t\t\tif fd == nil {\n\t\t\t\t\treturn nil, fmt.Errorf(\"could not find field %q in %q\", k, item.msg.Descriptor().FullName())\n\t\t\t\t}\n\n\t\t\t\tif isDynamicProtoMessage(fd.Message()) {\n\t\t\t\t\tfor _, p := range buildPathsBlindly(string(fd.FullName().Name()), v) {\n\t\t\t\t\t\tnewPath := p\n\t\t\t\t\t\tif item.path != \"\" {\n\t\t\t\t\t\t\tnewPath = item.path + \".\" + newPath\n\t\t\t\t\t\t}\n\t\t\t\t\t\tqueue = append(queue, fieldMaskPathItem{path: newPath})\n\t\t\t\t\t}\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\n\t\t\t\tif isProtobufAnyMessage(fd.Message()) && !fd.IsList() {\n\t\t\t\t\t_, hasTypeField := v.(map[string]interface{})[\"@type\"]\n\t\t\t\t\tif hasTypeField {\n\t\t\t\t\t\tqueue = append(queue, fieldMaskPathItem{path: k})\n\t\t\t\t\t\tcontinue\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\"could not find field @type in %q in message %q\", k, item.msg.Descriptor().FullName())\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\tchild := fieldMaskPathItem{\n\t\t\t\t\tnode: v,\n\t\t\t\t}\n\t\t\t\tif item.path == \"\" {\n\t\t\t\t\tchild.path = string(fd.FullName().Name())\n\t\t\t\t} else {\n\t\t\t\t\tchild.path = item.path + \".\" + string(fd.FullName().Name())\n\t\t\t\t}\n\n\t\t\t\tswitch {\n\t\t\t\tcase fd.IsList(), fd.IsMap():\n\t\t\t\t\t// As per: https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto#L85-L86\n\t\t\t\t\t// Do not recurse into repeated fields. 
The repeated field goes on the end of the path and we stop.\n\t\t\t\t\tfm.Paths = append(fm.Paths, child.path)\n\t\t\t\tcase fd.Message() != nil:\n\t\t\t\t\tchild.msg = item.msg.Get(fd).Message()\n\t\t\t\t\tfallthrough\n\t\t\t\tdefault:\n\t\t\t\t\tqueue = append(queue, child)\n\t\t\t\t}\n\t\t\t}\n\t\tcase len(item.path) > 0:\n\t\t\t// otherwise, it's a leaf node so print its path\n\t\t\tfm.Paths = append(fm.Paths, item.path)\n\t\t}\n\t}\n\n\t// Sort for deterministic output in the presence\n\t// of repeated fields.\n\tsort.Strings(fm.Paths)\n\n\treturn fm, nil\n}", "docstring": "// FieldMaskFromRequestBody creates a FieldMask printing all complete paths from the JSON body.", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/github.com/grpc-ecosystem/grpc-gateway/v2/runtime/fieldmask.go#L25-L110", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "indexNL", "code": "func (s *set) indexNL(key Key) int {\n\tfor i, k := range s.keys {\n\t\tif k == key {\n\t\t\treturn i\n\t\t}\n\t}\n\treturn -1\n}", "docstring": "// indexNL is Index(), but without the locking", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/github.com/lestrrat-go/jwx/jwk/set.go#L67-L74", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "BiDirectional", "code": "func BiDirectional() InitOption {\n\treturn func(g *Generator) { g.workflow = &onceWorkflow{workflow: &standardWorkflow{BiDi: true}} }\n}", "docstring": "// BiDirectional instructs the Generator to build the AST graph in both\n// directions (i.e., accessing dependents of an entity, not just dependencies).", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/github.com/lyft/protoc-gen-star/v2/init_option.go#L47-L49", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "DebugFn", "code": "func DebugFn(fn LogFunction) {\n\tstd.DebugFn(fn)\n}", "docstring": "// DebugFn logs a message from a func at level Debug on the standard logger.", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/github.com/sirupsen/logrus/exported.go#L143-L145", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Float64SliceVarP", "code": "func Float64SliceVarP(p *[]float64, name, shorthand string, value []float64, usage string) {\n\tCommandLine.VarP(newFloat64SliceValue(value, p), name, shorthand, usage)\n}", "docstring": "// Float64SliceVarP is like Float64SliceVar, but accepts a shorthand letter that can be used after a single dash.", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/github.com/spf13/pflag/float64_slice.go#L138-L140", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"} +{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "IPMaskVar", "code": "func IPMaskVar(p *net.IPMask, name string, value net.IPMask, usage string) {\n\tCommandLine.VarP(newIPMaskValue(value, p), name, \"\", usage)\n}", "docstring": "// IPMaskVar defines a net.IPMask flag with specified name, default value, and usage string.\n// The argument p points to a net.IPMask variable in which to 
store the value of the flag.", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/github.com/spf13/pflag/ipmask.go#L89-L91", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "filterSet", "code": "func filterSet(kvs []KeyValue, filter Filter) (Set, []KeyValue) {\n\tvar excluded []KeyValue\n\n\t// Move attributes that do not match the filter so they're adjacent before\n\t// calling computeDistinct().\n\tdistinctPosition := len(kvs)\n\n\t// Swap indistinct keys forward and distinct keys toward the\n\t// end of the slice.\n\toffset := len(kvs) - 1\n\tfor ; offset >= 0; offset-- {\n\t\tif filter(kvs[offset]) {\n\t\t\tdistinctPosition--\n\t\t\tkvs[offset], kvs[distinctPosition] = kvs[distinctPosition], kvs[offset]\n\t\t\tcontinue\n\t\t}\n\t}\n\texcluded = kvs[:distinctPosition]\n\n\treturn Set{\n\t\tequivalent: computeDistinct(kvs[distinctPosition:]),\n\t}, excluded\n}", "docstring": "// filterSet reorders kvs so that included keys are contiguous at the end of\n// the slice, while excluded keys precede the included keys.", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/go.opentelemetry.io/otel/attribute/set.go#L287-L309", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ptracePtr", "code": "func ptracePtr(request int, pid int, addr unsafe.Pointer, data int) (err error) {\n\t_, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/golang.org/x/sys/unix/zsyscall_freebsd_386.go#L427-L433", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pread", "code": "func pread(fd int, p []byte, offset int64) (n int, err error) {\n\tvar _p0 unsafe.Pointer\n\tif len(p) > 0 {\n\t\t_p0 = unsafe.Pointer(&p[0])\n\t} else {\n\t\t_p0 = unsafe.Pointer(&_zero)\n\t}\n\tr0, _, e1 := Syscall6(SYS_PREAD, uintptr(fd), uintptr(_p0), uintptr(len(p)), 0, uintptr(offset), 0)\n\tn = int(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm64.go#L1353-L1366", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Getrlimit", "code": "func Getrlimit(resource int, rlim *Rlimit) (err error) {\n\t_, _, e1 := RawSyscall(SYS_GETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/golang.org/x/sys/unix/zsyscall_linux_sparc64.go#L155-L161", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "pread", "code": "func pread(fd int, p []byte, offset int64) (n int, err error) {\n\tvar _p0 *byte\n\tif len(p) > 0 {\n\t\t_p0 = &p[0]\n\t}\n\tr0, _, e1 := sysvicall6(uintptr(unsafe.Pointer(&procpread)), 4, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(len(p)), uintptr(offset), 0, 0)\n\tn = int(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/golang.org/x/sys/unix/zsyscall_solaris_amd64.go#L1424-L1435", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Namespace", "code": "func (s *EnvSettings) Namespace() string {\n\tif ns, _, err := s.config.ToRawKubeConfigLoader().Namespace(); err == nil {\n\t\treturn ns\n\t}\n\treturn \"default\"\n}", "docstring": "// Namespace gets the namespace from the configuration", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/helm.sh/helm/v3/pkg/cli/environment.go#L222-L227", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Resource", "code": "func Resource(resource string) schema.GroupResource {\n\treturn SchemeGroupVersion.WithResource(resource).GroupResource()\n}", "docstring": "// Resource takes an unqualified resource and returns a Group qualified GroupResource", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/k8s.io/api/flowcontrol/v1alpha1/register.go#L37-L39", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "RegisterDefaults", "code": "func RegisterDefaults(scheme *runtime.Scheme) error {\n\tscheme.AddTypeDefaultingFunc(&CustomResourceDefinition{}, func(obj interface{}) { SetObjectDefaults_CustomResourceDefinition(obj.(*CustomResourceDefinition)) })\n\tscheme.AddTypeDefaultingFunc(&CustomResourceDefinitionList{}, func(obj interface{}) {\n\t\tSetObjectDefaults_CustomResourceDefinitionList(obj.(*CustomResourceDefinitionList))\n\t})\n\treturn nil\n}", "docstring": "// RegisterDefaults adds defaulter functions to the given scheme.\n// Public to allow building arbitrary schemes.\n// All generated defaulters are covering - they call all nested defaulters.", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1/zz_generated.defaults.go#L31-L37", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "UnmarshalJSON", "code": "func (r *Refable) UnmarshalJSON(d []byte) error {\n\treturn json.Unmarshal(d, &r.Ref)\n}", "docstring": "// UnmarshalJSON unmarshals the ref from JSON", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/k8s.io/kube-openapi/pkg/validation/spec/ref.go#L37-L39", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ValidateBytes", "code": "func (c *paramVerifyingSchema) ValidateBytes(data []byte) error {\n\tobj, err := schemavalidation.Parse(data)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tgvk, errs := schemavalidation.GetObjectKind(obj)\n\tif errs != nil {\n\t\treturn utilerrors.NewAggregate(errs)\n\t}\n\n\terr = c.verifier.HasSupport(gvk)\n\tif resource.IsParamUnsupportedError(err) {\n\t\tswitch c.directive {\n\t\tcase metav1.FieldValidationStrict:\n\t\t\treturn c.schema.ValidateBytes(data)\n\t\tcase metav1.FieldValidationWarn:\n\t\t\tklog.Warningf(\"cannot perform warn validation if server-side field validation is unsupported, skipping validation\")\n\t\tdefault:\n\t\t\t// can't be reached\n\t\t\tklog.Warningf(\"unexpected field validation directive: %s, skipping validation\", c.directive)\n\t\t}\n\t\treturn nil\n\t}\n\treturn err\n}", "docstring": "// ValidateBytes validates bytes per a ParamVerifyingSchema", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/k8s.io/kubectl/pkg/validation/schema.go#L129-L154", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GetNode", "code": "func GetNode(root *yaml.Node, path ...string) (*yaml.Node, bool, error) {\n\tresNode, restPath, err := asCloseAsPossible(root, path...)\n\tif err != nil {\n\t\treturn nil, false, err\n\t}\n\t// more path means the node didn't exist\n\tif len(restPath) != 0 {\n\t\treturn nil, false, nil\n\t}\n\treturn resNode, true, nil\n}", "docstring": "// GetNode gets the node at the given path in the given sequence of mapping\n// nodes, or, if it doesn't exist, returns false.", "url": "https://github.com/guardllamanet/guardllama/blob/26ddaa8e627f6b2a5570f669e34976931be8db74/vendor/sigs.k8s.io/controller-tools/pkg/schemapatcher/internal/yaml/nested.go#L77-L87", "sha": "26ddaa8e627f6b2a5570f669e34976931be8db74"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "main", "code": "func main() {\n\tx := Xenia{\n\t\tHost: \"localhost:8000\",\n\t\tTimeout: time.Second,\n\t}\n\n\tp := Pillar{\n\t\tHost: \"localhost:9000\",\n\t\tTimeout: time.Second,\n\t}\n\n\tif err := Copy(&x, &p, 3); err != io.EOF {\n\t\tfmt.Println(err)\n\t}\n}", "docstring": "// =============================================================================", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/_content/tour/eng/composition/decoupling/example6.go#L114-L128", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Wait", "code": "func (s *Server) Wait() error {\n\n\t// PRETEND THERE IS A SPECIFIC IMPLEMENTATION.\n\treturn nil\n}", "docstring": "// Wait prevents the server from accepting new connections.", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/_content/tour/grc/composition/pollution/example2.go#L41-L45", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "sendNotification", "code": "func sendNotification(n notifier) {\n\tn.notify()\n}", "docstring": "// sendNotification accepts values that implement the notifier\n// interface and sends notifications.", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/_content/tour/tur/interfaces/example3.go#L46-L48", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "SmallestComponent", "code": "func (v PreciseVector) SmallestComponent() Axis {\n\tt := v.Abs()\n\n\tif t.X.Cmp(t.Y) < 0 {\n\t\tif t.X.Cmp(t.Z) < 0 {\n\t\t\treturn XAxis\n\t\t}\n\t\treturn ZAxis\n\t}\n\tif t.Y.Cmp(t.Z) < 0 {\n\t\treturn YAxis\n\t}\n\treturn ZAxis\n}", "docstring": "// SmallestComponent returns the axis that represents the smallest component in this vector.", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/vendor/github.com/golang/geo/r3/precisevector.go#L185-L198", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "CheckPath", "code": "func CheckPath(path string) (err error) {\n\tdefer func() {\n\t\tif err != nil {\n\t\t\terr = &InvalidPathError{Kind: \"module\", Path: path, Err: err}\n\t\t}\n\t}()\n\n\tif err := checkPath(path, modulePath); err != nil {\n\t\treturn err\n\t}\n\ti := strings.Index(path, \"/\")\n\tif i < 0 {\n\t\ti = len(path)\n\t}\n\tif i == 0 {\n\t\treturn fmt.Errorf(\"leading slash\")\n\t}\n\tif !strings.Contains(path[:i], \".\") {\n\t\treturn fmt.Errorf(\"missing dot in first path element\")\n\t}\n\tif path[0] == '-' {\n\t\treturn fmt.Errorf(\"leading dash in first path element\")\n\t}\n\tfor _, r := range path[:i] {\n\t\tif !firstPathOK(r) {\n\t\t\treturn fmt.Errorf(\"invalid char %q in first path element\", r)\n\t\t}\n\t}\n\tif _, _, ok := SplitPathVersion(path); !ok {\n\t\treturn fmt.Errorf(\"invalid version\")\n\t}\n\treturn nil\n}", "docstring": "// CheckPath checks that a module path is valid.\n// A valid module path is a valid import path, as checked by [CheckImportPath],\n// with three additional constraints.\n// First, the leading path element (up to the first slash, if any),\n// by convention a domain name, must contain only lower-case ASCII letters,\n// ASCII digits, dots (U+002E), and dashes (U+002D);\n// it must contain at least one dot and cannot start with a dash.\n// Second, a final path element of the form /vN, where N looks numeric\n// (ASCII digits and dots), must not begin with a leading zero, must not be /v1,\n// and must not contain any dots. For paths beginning with \"gopkg.in/\",\n// this second requirement is replaced by a requirement that the path\n// follow the gopkg.in server's conventions.\n// Third, no path element may begin with a dot.", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/vendor/golang.org/x/mod/module/module.go#L309-L341", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "accept", "code": "func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) {\n\tr0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)))\n\tfd = int(r0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/vendor/golang.org/x/sys/unix/zsyscall_freebsd_riscv64.go#L49-L56", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Adjtime", "code": "func Adjtime(delta *Timeval, olddelta *Timeval) (err error) {\n\t_, _, e1 := Syscall(SYS_ADJTIME, uintptr(unsafe.Pointer(delta)), uintptr(unsafe.Pointer(olddelta)), 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/vendor/golang.org/x/sys/unix/zsyscall_freebsd_riscv64.go#L451-L457", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Fchmodat", "code": "func Fchmodat(dirfd int, path string, mode uint32, flags int) (err error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(path)\n\tif err != nil {\n\t\treturn\n\t}\n\t_, _, e1 := Syscall6(SYS_FCHMODAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/vendor/golang.org/x/sys/unix/zsyscall_dragonfly_amd64.go#L657-L668", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Setrtable", "code": "func Setrtable(rtable int) (err error) {\n\t_, _, e1 := syscall_rawSyscall(libc_setrtable_trampoline_addr, uintptr(rtable), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "//go:cgo_import_dynamic libc_setresuid setresuid \"libc.so\"\n// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.go#L1946-L1952", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Faccessat", "code": "func Faccessat(dirfd int, path string, mode uint32, flags int) (err error) {\n\tvar _p0 *byte\n\t_p0, err = BytePtrFromString(path)\n\tif err != nil {\n\t\treturn\n\t}\n\t_, _, e1 := syscall_syscall6(libc_faccessat_trampoline_addr, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags), 0, 0)\n\tif e1 != 0 {\n\t\terr = errnoErr(e1)\n\t}\n\treturn\n}", "docstring": "//go:cgo_import_dynamic libc_exit exit \"/usr/lib/libSystem.B.dylib\"\n// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.go#L1137-L1148", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "consumeFixed32Ptr", "code": "func consumeFixed32Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) {\n\tif wtyp != protowire.Fixed32Type {\n\t\treturn out, errUnknown\n\t}\n\tv, n := protowire.ConsumeFixed32(b)\n\tif n < 0 {\n\t\treturn out, errDecode\n\t}\n\tvp := p.Uint32Ptr()\n\tif *vp == nil {\n\t\t*vp = new(uint32)\n\t}\n\t**vp = v\n\tout.n = n\n\treturn out, nil\n}", "docstring": "// consumeFixed32Ptr wire decodes a *uint32 pointer as a Fixed32.", "url": "https://github.com/ardanlabs/gotour/blob/6599a0b6d88dd03619c8bc4c5f53e59c70c36208/vendor/google.golang.org/protobuf/internal/impl/codec_gen.go#L3486-L3501", "sha": "6599a0b6d88dd03619c8bc4c5f53e59c70c36208"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "ReregisterEntity", "code": "func (_ *RESTProvider) ReregisterEntity(\n\t_ context.Context, _ minderv1.Entity, _ *properties.Properties,\n) error {\n\t// TODO: implement\n\treturn nil\n}", "docstring": "// ReregisterEntity implements the Provider interface", "url": "https://github.com/mindersec/minder/blob/3cb22b6aa37dd3f54af46c3156dd09c4eda60105/internal/providers/testproviders/rest.go#L86-L91", "sha": "3cb22b6aa37dd3f54af46c3156dd09c4eda60105"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "GetNiceStatus", "code": "func GetNiceStatus(code codes.Code) *NiceStatus {\n\ts := &NiceStatus{}\n\treturn s.SetCode(code)\n}", "docstring": "// GetNiceStatus gets a nice status from the code.", "url": "https://github.com/mindersec/minder/blob/3cb22b6aa37dd3f54af46c3156dd09c4eda60105/internal/util/statuses.go#L29-L32", "sha": "3cb22b6aa37dd3f54af46c3156dd09c4eda60105"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "nameTableIdx", "code": "func (p *PPU) nameTableIdx(addr uint16) uint {\n\tvar (\n\t\tidx = (addr - 0x2000) / 0x0400\n\t\tmode = p.cart.MirrorMode()\n\t)\n\n\tswitch mode {\n\tcase ines.MirrorHorizontal:\n\t\tswitch idx {\n\t\tcase 0, 1:\n\t\t\treturn 0\n\t\tdefault:\n\t\t\treturn 1\n\t\t}\n\tcase ines.MirrorVertical:\n\t\tswitch idx {\n\t\tcase 0, 2:\n\t\t\treturn 0\n\t\tdefault:\n\t\t\treturn 1\n\t\t}\n\tcase ines.MirrorSingle0:\n\t\treturn 0\n\tcase ines.MirrorSingle1:\n\t\treturn 1\n\tdefault:\n\t\tpanic(fmt.Sprintf(\"invalid mirroring mode: %d\", mode))\n\t}\n}", "docstring": "// nameTableIdx returns the index of the nametable (0 or 1) for the given vram\n// address, based on the cartridge’s mirroring mode.", "url": "https://github.com/maxpoletaev/dendy/blob/19c75f9a5b6e5b891a0c458c71a5eadbb25fddab/ppu/ppu.go#L231-L259", "sha": "19c75f9a5b6e5b891a0c458c71a5eadbb25fddab"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "TestSystem", "code": "func TestSystem(t *testing.T, makeSystem MakeSystem) {\n\tt.Run(\"file\", file.runFunc(makeSystem))\n\tt.Run(\"proc\", proc.runFunc(makeSystem))\n\tt.Run(\"poll\", poll.runFunc(makeSystem))\n\tt.Run(\"socket\", socket.runFunc(makeSystem))\n}", "docstring": "// TestSystem is a test suite which validates the behavior of wasi.System\n// implementations.", "url": "https://github.com/dispatchrun/wasi-go/blob/038d5104aacbb966c25af43797473f03c5da3e4f/wasitest/system.go#L14-L19", "sha": "038d5104aacbb966c25af43797473f03c5da3e4f"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "AddReservation", "code": "func (c *constraints) AddReservation(p peer.ID, a ma.Multiaddr) error {\n\tc.mutex.Lock()\n\n\tnow := time.Now()\n\tc.cleanup(now)\n\n\tif len(c.total) >= c.rc.MaxReservations {\n\t\tc.mutex.Unlock()\n\t\treturn errTooManyReservations\n\t}\n\n\tip, err := manet.ToIP(a)\n\tif err != nil {\n\t\tc.mutex.Unlock()\n\t\treturn errors.New(\"no IP address associated with peer\")\n\t}\n\n\tpeerReservations := c.peers[p]\n\tif len(peerReservations) >= c.rc.MaxReservationsPerPeer {\n\t\tc.mutex.Unlock()\n\t\treturn errTooManyReservationsForPeer\n\t}\n\n\tipReservations := c.ips[ip.String()]\n\tif len(ipReservations) >= c.rc.MaxReservationsPerIP {\n\t\tc.mutex.Unlock()\n\t\treturn errTooManyReservationsForIP\n\t}\n\n\tvar asnReservations []time.Time\n\tvar asn uint32\n\tif ip.To4() == nil {\n\t\tasn = asnutil.AsnForIPv6(ip)\n\t\tif asn != 0 {\n\t\t\tasnReservations = c.asns[asn]\n\t\t\tif len(asnReservations) >= c.rc.MaxReservationsPerASN {\n\t\t\t\tc.mutex.Unlock()\n\t\t\t\treturn errTooManyReservationsForASN\n\t\t\t}\n\t\t}\n\t}\n\n\texpiry := now.Add(validity)\n\tc.total = append(c.total, expiry)\n\n\tpeerReservations = append(peerReservations, expiry)\n\tc.peers[p] = peerReservations\n\n\tipReservations = append(ipReservations, expiry)\n\tc.ips[ip.String()] = ipReservations\n\n\tif asn != 0 {\n\t\tasnReservations = append(asnReservations, expiry)\n\t\tc.asns[asn] = asnReservations\n\t}\n\tc.mutex.Unlock()\n\treturn nil\n}", "docstring": "// AddReservation adds a reservation for a given peer with a given multiaddr.\n// If adding this reservation violates IP constraints, an error is returned.", "url": "https://github.com/QuilibriumNetwork/ceremonyclient/blob/c3ebffc519ce29a88cc6fe14b3b39049c4989f34/go-libp2p/p2p/protocol/circuitv2/relay/constraints.go#L49-L106", "sha": "c3ebffc519ce29a88cc6fe14b3b39049c4989f34"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "Delete", "code": "func (set *IdSet) Delete(atom Atom) bool {\n\tswitch a := atom.(type) {\n\tcase *Vertex:\n\t\tif _, exists := set.atoms[a.GetID()]; exists {\n\t\t\tdelete(set.atoms, a.GetID())\n\t\t\treturn true\n\t\t}\n\tcase *Hyperedge:\n\t\tif _, exists := set.atoms[a.GetID()]; exists {\n\t\t\tdelete(set.atoms, a.GetID())\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}", "docstring": "// Delete removes an atom from the IdSet and returns true if the atom was\n// present.", "url": "https://github.com/QuilibriumNetwork/ceremonyclient/blob/c3ebffc519ce29a88cc6fe14b3b39049c4989f34/node/hypergraph/inmem/types.go#L106-L120", "sha": "c3ebffc519ce29a88cc6fe14b3b39049c4989f34"}
+{"repo_name": "", "dataset": "github_2023", "owner": "", "lang": "", "func_name": "WithReason", "code": "func WithReason(ctx context.Context, reason Reason) context.Context {\n\tinfo := infoFromCtx(ctx)\n\tinfo.reason = reason\n\treturn withInfo(ctx, info)\n}", "docstring": "// WithReason creates a context that has an associated Reason (which ends up in\n// traces created under that context).", "url": "https://github.com/QuilibriumNetwork/ceremonyclient/blob/c3ebffc519ce29a88cc6fe14b3b39049c4989f34/pebble/objstorage/objstorageprovider/objiotracing/obj_io_tracing_on.go#L270-L274", "sha": "c3ebffc519ce29a88cc6fe14b3b39049c4989f34"}