{"docstore/metadata": {"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/docs/source/conf.py__Configuration_file_for__exclude_patterns._": {"doc_hash": "65424849dc26d8005aacb9bbd57a68b3d3fdaec4fc28056a9d6363073b2a0813"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/docs/source/conf.py_generate_apidocs_generate_apidocs.subprocess_check_call_": {"doc_hash": "10a8ce9e198196367aa18a06320178578357ac476ce8abc0631470afa091b5d6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/docs/source/conf.py_setup_": {"doc_hash": "96d7c1a913c5e5b19f801f57244bfabf4a1b6fb98c4828c027a7e809c7899399"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/__init__.py_os_": {"doc_hash": "760b9381bfddaf91822efab337820ebb61386057e3ca91c1de27ecfd8051308e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py__This_file_helps_to_comp_sys": {"doc_hash": "ae5674c5e0be0b96a99d8bbee67455b7bf99dd4aace251466af628cbd40c962e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_get_keywords_get_keywords.return.keywords": {"doc_hash": "87b91a61467dec11a0f5dbd405a26ea6e941be28cf983d33435a27de60840d7f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_VersioneerConfig_register_vcs_handler.return.decorate": {"doc_hash": "e7b4c413420038d0ed1830f98eacb147e5e649aa3425a1a612424583dea78728"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_run_command_run_command.return.stdout_p_returncode": {"doc_hash": "2a25c770a5c8893ea679e20337afa6c11a943e350ae7c6492873045a356c33e3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_versions_from_parentdir_versions_from_parentdir.raise_NotThisMethod_root": {"doc_hash": "e47e525037bc2ce6193b5076ff665e346e55f7c0c9f068cb139e329946e70db1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_git_get_keywords_git_get_keywords.return.keywords": {"doc_hash": "bb13ca9fef81c111de6c05b5abcafb2fb4e1c6f8d44fc541af7762d7ad8dabd3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_git_versions_from_keywords_git_versions_from_keywords.return._version_0_unknown_": {"doc_hash": "bce2c8e56390e5ec5d2b067c92ec4fc9bf5747e95fa7fa57616af8cd70247bce"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_git_pieces_from_vcs_git_pieces_from_vcs.return.pieces": {"doc_hash": "9dfe87dc4569270b529e1894d38f40d29e872a0514807adecf83ce094beeb615"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_plus_or_dot_render_pep440.return.rendered": {"doc_hash": "179e54ddc2f68ab800a32ab8a3aa4824128450a9304cf1ff5cf0633b10b38146"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_pep440_old_render_pep440_old.return.rendered": {"doc_hash": "842e375e39745e7489a13633cac168542114a64242c56a8d10d440d430fe1a2a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_git_describe_render_git_describe.return.rendered": {"doc_hash": 
"ca541fbf5be193f49c52a416bac7e1d201efd978e8208ed40a48b29431f0b56e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_git_describe_long_render_git_describe_long.return.rendered": {"doc_hash": "b4ee82712caa15ee150826c967776c9a320b1219fe2c0a210345babe4ec42a0d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_render.return._version_rendered_fu": {"doc_hash": "0f4687c560f37254bb87c5c3a17afa33761af6a46c8e77a09c24b698b000c7fd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_get_versions_": {"doc_hash": "5b12f0f654d66aa0ffa27265afb6e1ed67f3e40ddb691234d16678c9fc0083d9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_DecathlonDataset_DecathlonDataset._": {"doc_hash": "446688ca7a4e3b3ca48f24f23d854a819fcec5917d26938cfd3cbe9c13a2cde0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_DecathlonDataset.resource_DecathlonDataset.md5._": {"doc_hash": "8b2a68f6db6d7808a53ed532e3f7c638409fbb60194f495e7887a6712ec45e73"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/__init__.py_CSVSaver_": {"doc_hash": "73abef87fcd9858c3521051429367ac764c9ff2f620a88b469fab9994d590443"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/csv_saver.py_CSVSaver.finalize_CSVSaver.finalize.with_open_self__filepath_.for_k_v_in_self__cache_d.f_write_n_": {"doc_hash": "141e90b4ed726d49e0316638fe2972b4dc873663a0acbdfd941e1cc6424b3192"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/csv_saver.py_CSVSaver.save_CSVSaver.save.self__cache_dict_save_key": {"doc_hash": "af328047ed3d0c55633dbcd7f4becacb57a4ad60994f1915dc5c94a8948e8524"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/csv_saver.py_CSVSaver.save_batch_": {"doc_hash": "4765a3093ecf1c6a388f0f00c58eddc8b6b8ade09544a711df9c6a89df3c2326"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_saver.py_NiftiSaver.save_NiftiSaver.save.write_nifti_": {"doc_hash": "22c89155a6f1c28642a2b3da569e58facb3f22d0928befd435cb4428b6bda1c7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_saver.py_NiftiSaver.save_batch_": {"doc_hash": "8afb576b50f839e09e022de3cd020b3aee1bce1d960970492d74cda6ad7d6d81"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_writer.py_from_typing_import_Option_write_nifti._": {"doc_hash": "555f1a1fd447f563eeef9bfca8814678f2ba6bbe90e0b2a8a42edf7d032466ff"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_writer.py_write_nifti.if_data_ndim_3_mult_": {"doc_hash": "51c3efbf79ffec3e614eccee161ffd4c0b23421f638dc9a9cec0e5eee8d11a67"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/png_saver.py_PNGSaver.save_PNGSaver.save.write_png_": {"doc_hash": "49f02c92032d5ca6d16d1d36c1291be3b944e72a1c0cdd384066a7fefcb0af19"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/png_saver.py_PNGSaver.save_batch_": 
{"doc_hash": "3ad131fba7fdc30799ff70ed52f0b65c85846603429b6b144da8031ee8b09321"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/png_writer.py_from_typing_import_Option_": {"doc_hash": "95a0ee0bc4620a1ad992df0a82a9d7edb64e8dc43fb6a7e781a0fe065b52e071"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/synthetic.py_from_typing_import_Option_create_test_image_2d.return.noisyimage_labels": {"doc_hash": "e1261e1838bc89338e270a6307451e906cb83d04c997ba6f2a6e93672b2bafa4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/synthetic.py_create_test_image_3d_": {"doc_hash": "a0881560923dc0072243ef2eda96abdd8743c4b3cb8af4d29758c284ba252f12"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_iter_patch_slices_iter_patch_slices.for_position_in_product_.yield_tuple_slice_s_s_": {"doc_hash": "09adc8f4e58daea6dce225e9c42840a852e09fe7e201242e435e94f04f01ca60"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_iter_patch_iter_patch.if_copy_back_.arr_arrpad_slices_": {"doc_hash": "2bb272e862bfb1239fe3d7dbc4049e1f49c5bebd3f3cfa223a43d3c843c16262"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_get_valid_patch_size_get_valid_patch_size.return.tuple_min_ms_ps_or_ms_f": {"doc_hash": "35fe64943f900259d20a8956ca9cf3002209eadeeebfa11e12ae91a7894ab06d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_correct_nifti_header_if_necessary_correct_nifti_header_if_necessary.return.img_nii": {"doc_hash": "71d6c03268f650f0e1d60d3f72eca58182c0e311ec246bf03f3c2eb6d79a7610"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_rectify_header_sform_qform_rectify_header_sform_qform.return.img_nii": {"doc_hash": "bba57837a85dacfcfc3b2df0e0243a9de35b7ca5febc850ade70fb381c7887af"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_zoom_affine_zoom_affine.return.new_affine": {"doc_hash": "edf10f459ced8df85a739463aae371291aefa3f32fac658120aa7450feaba122"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_compute_shape_offset_compute_shape_offset.return.out_shape_astype_int_of": {"doc_hash": "a572ca1eb500c486df0055ccf4ec10d9b1b26f37d4c6840c7803e210ab00ce07"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_to_affine_nd_to_affine_nd.return.new_affine": {"doc_hash": "2c6505165e0156e96917918b3d49bfc58ace4bc550ad8e9a7e0f1524b2853e70"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/evaluator.py_EnsembleEvaluator._iteration_": {"doc_hash": "1594e8088e36550e50726d4597955a3bbf73c4b9bc653abe7a33790ada545e95"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/multi_gpu_supervised_trainer.py_create_multigpu_supervised_trainer_create_multigpu_supervised_trainer.return.create_supervised_trainer": {"doc_hash": "912180221de1de21ff89a84443dd752d1e201bc05402a299d7212a67bb44c76b"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/multi_gpu_supervised_trainer.py_create_multigpu_supervised_evaluator_": {"doc_hash": "51861ac58dad679fee117ea7fba629534bac9bc16c687b389d35196738ffa20e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_Trainer_Trainer.get_train_stats.return._total_epochs_self_sta": {"doc_hash": "5b688b79ae43b609f2f3bde7c8c3f5362c7ff9a74337b677ac022d5bf6f53224"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/workflow.py_Workflow_Workflow._": {"doc_hash": "d702b2a4dad58cdd3f6646d6493e3ee8a44c4018ada0c7cf2139a51af37bea40"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/__init__.py_CheckpointLoader_": {"doc_hash": "39acbe4ef16ad4f4da276780851997c051cc92d99d45aee6822f47ee0f92ba28"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver_CheckpointSaver._": {"doc_hash": "812cf498735b6d92f0192d1ee4ca9762e7ff40d660089c6f025b1216992484ac"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver.attach_CheckpointSaver.attach.if_self__interval_checkpo.if_self_epoch_level_.else_.engine_add_event_handler_": {"doc_hash": "cebcb1dd467255ba80392a282d73ca2cd398a7ad1bc8d65b41bdd345f051c469"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/lr_schedule_handler.py_LrScheduleHandler_": {"doc_hash": "ac579b9215289a6fac628af996ed08a512cb06d6a7720d25473c84dc81a32a06"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/segmentation_saver.py_SegmentationSaver_SegmentationSaver.__init__.self._name.name": {"doc_hash": "d0607d729c7ce6dd76ac84fddf4081dfa12392b5290d78aa4b75740d8cfe3d29"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/segmentation_saver.py_SegmentationSaver.attach_": {"doc_hash": "817723db9f808b08a93743af0b91aea552f84d232b8de8ddfb4997dd7f667ee2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler_StatsHandler.__init__.if_logger_handler_is_not_.self_logger_addHandler_lo": {"doc_hash": "45f42eca3eb4cf1e0a9eb6e5a587b8585631e32a36cf8b51fcab4680b8df7c50"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler.attach_StatsHandler.attach.None_3.engine_add_event_handler_": {"doc_hash": "7d55afd0a799637cb76a410aa3f69ad84183de51cf3af240e22adf0ce1f03f82"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler._default_epoch_print_StatsHandler._default_epoch_print.None_1": {"doc_hash": "d249273b1c6443c06e6ca53fb7797fdf60abe358ddacfa06f7dc6b519235be1c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler._default_iteration_print_": {"doc_hash": "047dafdc756001495e889134313d6c8b4f7c4f8efd36bc7eefda0515b73cddbe"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler_TensorBoardStatsHandler.__init__.self.tag_name.tag_name": {"doc_hash": "0e115141de5f98feee0b018c67b7e050ee7b7ad78bee1a681a58cd77d20b4a27"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler.attach_TensorBoardStatsHandler.attach.None_1.engine_add_event_handler_": {"doc_hash": "0ce5a90f6c069e3abb6b4a4bf7adac4553e00121dc1a5b095b85fe52b46035ab"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler.epoch_completed_TensorBoardStatsHandler.iteration_completed.if_self_iteration_event_w.else_.self__default_iteration_w": {"doc_hash": "4ec78ab8da4916ee2787ef9705f927486901ce4e14af87fc5651f132779bbaca"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler._default_epoch_writer_TensorBoardStatsHandler._default_epoch_writer.writer_flush_": {"doc_hash": "cd5c84d3ec95b47d940cec2b312a77cfff4004e1f10bbbd83036bcd2e5eb0b13"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler._default_iteration_writer_TensorBoardStatsHandler._default_iteration_writer.writer_flush_": {"doc_hash": "7596669a770b6272035989a5318a370d4921fab84cfed90016fe2e4ce66cb633"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardImageHandler_TensorBoardImageHandler.__init__.self.max_channels.max_channels": {"doc_hash": "b92b5323ae27bbc9c9c9f3f9adc4e6f95d4c816da3ce2a59e7b4a555fc2ec4dd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardImageHandler.attach_": {"doc_hash": "aa04f1fa61815272ee7504f31e01b1e9d6e48a6dd4a1216c9e0525e79da3b655"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/validation_handler.py_from_typing_import_TYPE_C_": {"doc_hash": "14d4d556e4205e0266cfc5c48b829aa27c074ca2c6dc9975ec1105a41e9e2a0f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/utils.py__get_scan_interval_": {"doc_hash": "78961b565bd42ad9bedede4b640448d7860fb4cfdeb6485cc6694bb73d55e7fb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_DiceLoss.forward_DiceLoss.forward.return.f": {"doc_hash": "25d63239c4f6bbbfd3603b8c57785d2c236d31ea2487bff29f6b1a41fb292235"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/tversky.py_TverskyLoss.forward_": {"doc_hash": "315b6e0ff64ab8eda6a101afe7acc974ffb0b53cb78f15a738159938cf2d9221"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/downsample.py_from_typing_import_Option_": {"doc_hash": "d4d49a9a8bcc64c5b87bf6f7089e66586f111c4c30c8ef727e6ce06bc28f28ea"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/factories.py_from_typing_import_Any_C_LayerFactory.add_factory_callable.self.__doc__._": {"doc_hash": 
"0ce5e5a6525af46e7144d6ffea6ea8df844251e1d8b5d613b04f33c67c58c663"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/factories.py_LayerFactory.factory_function_LayerFactory.get_constructor.return.fact_args_": {"doc_hash": "642c4ff14bb7576a4dfa0a923a7ae85fdf24f68ace664d71e211b20f42e6efcd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/factories.py_LayerFactory.__getitem___LayerFactory.__getattr__.return.super___getattribute___": {"doc_hash": "7f669f5b46fbdfa4fcdd5e7e7ec960fe524d60cca6f84bf2deb47ae2596abbd9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/classifier.py_from_typing_import_Option_Classifier.__init__.if_last_act_is_not_None_.self_final_add_module_la": {"doc_hash": "c9ad7fbec44242c8b868e1d9bfa477e755295291270a263a8100abbf388b8f4c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/classifier.py_Discriminator_Discriminator.__init__.super___init___in_shape": {"doc_hash": "f7ccc05de4644310159d48b100ebfd1eafc2a5a5b7c0481e394e828d7a1a866c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/classifier.py_Critic_": {"doc_hash": "8c0eebc31dc1f73c6d81eb0429789d8c1254473144b317eefe370e0a83625038"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py__DenseBlock__DenseBlock.__init__.for_i_in_range_layers_.self_add_module_denselay": {"doc_hash": "de58d6a825da91f4ba9ac313d55438ecc0979bf9fbf867ccf5db5b5c6b84f967"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py__Transition__Transition.__init__.self_add_module_pool_p": {"doc_hash": "6a10fef7cc8d79fb27c9e9a60321e802e000731cd14701b480cc8b33d34e03b9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_DenseNet_DenseNet._": {"doc_hash": "324aedbac7abd20bbe25d295b01d3df50a27e03a4f2c11ab3f2469f750818ce8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_DenseNet.__init___DenseNet.forward.return.x": {"doc_hash": "d05ff1d1ace3b824ebe1156e5d88e47456bef9d70f8f2a5104e65056357581cf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/generator.py_from_typing_import_Option_Generator._": {"doc_hash": "0b4b0e5f3e033d87e74010cbbd17e9b157514932cca6d305eca1d489672938bd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/generator.py_Generator.__init___Generator.__init__.for_i_c_s_in_enumerat.echannel.c": {"doc_hash": "52f2d39f0cd31601c71546bcafa251ed8779699703f9217871b1203a8e77898e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/generator.py_Generator._get_layer_": {"doc_hash": "90207e4cb3b260b52b0aac332b4935e2bd49922d175c3a89c7fe84173aa38f5f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/highresnet.py_HighResNet_HighResNet._": {"doc_hash": "1729e02d44165d7d82a9bca71c81bf6711011faf4985bfd890db829f9e2ec781"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/highresnet.py_HighResNet.__init___": {"doc_hash": "b47ff1851ce1dfd7557af043587ac051cf8711545032aab230bca0cbfe7dcbbc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regressor.py_from_typing_import_Option_Regressor.__init__.self.final.self__get_final_layer_ec": {"doc_hash": "1cb4efa6fd9f92da8bf878e1778f7d527f55328fe8be9b00355453f062a1213d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regressor.py_Regressor._get_layer_": {"doc_hash": "ea77ddccc641443b4a808bc67407483a0970fe92238a6d917d118695e0e39ed6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/unet.py_UNet._get_down_layer_UNet._get_bottom_layer.return.self__get_down_layer_in_c": {"doc_hash": "888e22701f8c8d602d631b7765812030e00ad6f06fadaafd6c954e29e121bbfc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/unet.py_UNet._get_up_layer_": {"doc_hash": "d55ac29317ef503b41c6fc45548c4a272543c24ad235d2bb49fe1039d6179751"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_warnings_one_hot.return.labels": {"doc_hash": "10aef97d62e6df456d5a5a3e98dad4d6dc34c29396d88916806d0d13390c89d2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_normalize_transform_normalize_transform.return.norm": {"doc_hash": "a91f8ac1d3743447289da8bf2338daa420ef4904d4fc727de58e300f61a8af03"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/adaptors.py_adaptor._inner_adaptor.return._inner": {"doc_hash": "788a31a193a148e651e3512fdc2282edcb2b5afd893c4de187c735020f7fb7a6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/adaptors.py_apply_alias_to_kwargs.return._inner": {"doc_hash": "119b24a527ed62673608fd476b7c77e359e81d0902e36fe6dc721916ae135ee6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/adaptors.py_FunctionSignature_": {"doc_hash": "2c76e35889050fac8b55cfabf89b58ee11623333d06e8e6f959149f9003e5fec"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/compose.py_Compose_Compose.__init__.self_set_random_state_see": {"doc_hash": "6874e56563cc98a57640acd32fcf65f13946bb92710fd3231837ae5f34a7c1a0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/__init__.py__": {"doc_hash": "c23c3b22e4a81f73f6cb3229b775eae033de918fd6d04619cce6fa6ee87e622a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_BorderPad_BorderPad.__init__.self.mode.NumpyPadMode_mode_": {"doc_hash": "9a0ade44ba35be00a4272ed94298d31611eaa65d5da928c9586e6e3b55c85292"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_BorderPad.__call___BorderPad.__call__.return.np_pad_": {"doc_hash": "30711ffc4ff9779d8b222510571895f27fe6711b8e5829c2b9084e8ba48e3709"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_DivisiblePad_DivisiblePad.__init__.self.mode.NumpyPadMode_mode_": {"doc_hash": "b0020f7b88be28c5a281d6452b73bf07f08b08fb23b860c1fe87796937fe4a99"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_DivisiblePad.__call___DivisiblePad.__call__.return.spatial_pad_img_": {"doc_hash": "85f64a334e3c42a591fd99576c01f3b457132cf5a90430f3ec055ab08288af0b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_CenterSpatialCrop_CenterSpatialCrop.__call__.return.cropper_img_": {"doc_hash": "8369b3c50b2e2ba3078d78c12e46e33a164d40135e0a6582d12c68ec48a9bbbe"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandSpatialCrop_RandSpatialCrop.__init__.self._slices.None": {"doc_hash": "342fdaa230f0e553fe13972706e29131f662281af4cfac93e5760d6bcebe75fd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandSpatialCropSamples_RandSpatialCropSamples.__call__.return._self_cropper_img_for___": {"doc_hash": "26d19408206586e73e4cdc71281f8fcb66d7d2aa7db0fe79835d6b87063f1111"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_SpatialPadd_SpatialPadd.__call__.return.d": {"doc_hash": "6ac0cf6a4e2f0ba58ccf88beb5838f5950e07c9be5e4c5a18fdd4e69eab2a1d7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_BorderPadd_BorderPadd.__call__.return.d": {"doc_hash": "48b77d65abc43f6a0b8ea1b13831e201d209f27261c1906d012f6e48bf0f9ba4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_DivisiblePadd_DivisiblePadd.__call__.return.d": {"doc_hash": "6ddeba8520b1c961e90fa4fd52ca3b8bd08944ebe011d9c1eaa2be325b40963a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_SpatialCropd_SpatialCropd.__call__.return.d": {"doc_hash": "d3e841b32e1fcb6f329554a892ded4c011bd16bf91db6751f70d835b902226b4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_CenterSpatialCropd_CenterSpatialCropd.__call__.return.d": {"doc_hash": "698fa11a0290cde87d7e62c3c35061539e70eb6b19df216db07c54535d8f1658"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandSpatialCropd_RandSpatialCropd.__init__.self._size.None": {"doc_hash": "1453b81bd5138de339a87b457771ea58888b30534690fd13edda175c423e7584"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandCropByPosNegLabeld.__call___RandCropByPosNegLabeld.__call__.return.results": {"doc_hash": "1c0596846191f67366b5e82b2a41f67bd701516a82d487e12a30c572e95ed430"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_SpatialPadD_": {"doc_hash": "aedef2bc6d957b11fcdbe7cbe7f45e350472791a1d0873fa45ac0df47521699d"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/__init__.py__": {"doc_hash": "bff2377319ba4065211f7fda928cea2b03eb3bd5ba0890e9b2401f81cf62030b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandScaleIntensity_RandScaleIntensity.__call__.return.scaler_img_": {"doc_hash": "ad9ce3d2624609edf32464b7dab924f25d9f2dcf47d846eb8f2459b717a41b46"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRange_ScaleIntensityRange.__init__.self.clip.clip": {"doc_hash": "a9c00efe8eeb4dea56284f89cd4b33114e40f6927e4fe6289b9aa7ba5e82b158"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRange.__call___ScaleIntensityRange.__call__.return.img": {"doc_hash": "1fe05b90afeed7cb071ffe5d1d85602b9d51ac524f8ba0688e6d50276019de57"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_AdjustContrast_AdjustContrast.__call__.return.np_power_img_img_min_": {"doc_hash": "37f22d1982ca8af8ea8602b164f87f6a541dccfe040830596ab9519babfaddf3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandAdjustContrast_RandAdjustContrast.__call__.return.adjuster_img_": {"doc_hash": "531afaacd0d13cadf599c6632fda038dc754d8dd16434caba5e05caf5e7dc7c9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRangePercentiles_ScaleIntensityRangePercentiles._": {"doc_hash": "138d214c4d4ed40bbdedf475a9334379536f7ec93c0a76cda2764908289fa09b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRangePercentiles.__init___ScaleIntensityRangePercentiles.__init__.self.relative.relative": {"doc_hash": "4965b0ce3062856fdb2dbdb9145de1cd6c6e0eb5c1511a615252a5eee798f9fb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRangePercentiles.__call___ScaleIntensityRangePercentiles.__call__.return.img": {"doc_hash": "e65a8e752b8de4350c712cab1a01bcda768e200b65799977fff944e8c888c0bf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ShiftIntensityd_ShiftIntensityd.__call__.return.d": {"doc_hash": "2a6439c305d1d36cb18dcad096b370b492368adcc80a13da9bd232f08b98fa47"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandShiftIntensityd_RandShiftIntensityd.__call__.return.d": {"doc_hash": "09c15129050732ddf2c1d7437fb0e77daf749aca30e626ef987aa5b83dcc4db4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ScaleIntensityd_ScaleIntensityd.__call__.return.d": {"doc_hash": "1e79511a74d5dd0608f60a8d4bcd81232a49fa92185fcc05f43f2b1d3c2f66ce"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandScaleIntensityd_RandScaleIntensityd.__call__.return.d": {"doc_hash": 
"d7d88a6d576a6e5f08282b56a56ab2e8eb69c28503ad7659b02e9aa1dad17783"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_NormalizeIntensityd_NormalizeIntensityd.__call__.return.d": {"doc_hash": "bb2130aa627a8316c8be0a1d24018770eb0f4980ac045bfa01189d192b137e6c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ThresholdIntensityd_ThresholdIntensityd.__call__.return.d": {"doc_hash": "34550e50d315d0d5241e1fc1976984b3bcd0384f12cf91a244e20c5b748aa62f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ScaleIntensityRanged_ScaleIntensityRanged.__call__.return.d": {"doc_hash": "51455513948359084be1b0d62f3a4ddd91d4e4b8a6ac19d7c77695b751fa7a11"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_AdjustContrastd_AdjustContrastd.__call__.return.d": {"doc_hash": "888ee6d970762982165ebeb89aa8021ffdfe2dc6005a7c01fdd8dfc77190bd2e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandAdjustContrastd_RandAdjustContrastd.__call__.return.d": {"doc_hash": "f630f564f2abfc39c9ff2b6879ef7a1d029d3cb0428076434569da36bb83d3e5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ScaleIntensityRangePercentilesd_ScaleIntensityRangePercentilesd.__call__.return.d": {"doc_hash": "de61ad18e4173f1e1c21007c909e54000733575c546ad51bcb1adade167aafda"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_MaskIntensityd_MaskIntensityd.__call__.return.d": {"doc_hash": "bab838def93a26849d1e7b94b1ff4632ae6b4f8a288535b0a2e3fc7f85d5db82"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianNoiseD_": {"doc_hash": "176fa826bf19427a3e465b1ee737ab3643f0d17ffd232f5c8b2fc926603ea1d0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/__init__.py__": {"doc_hash": "c7ecd1fe880f43eda25bfb18254cee43f6a4517366d3dae055caf0f318c3164e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/__init__.py__": {"doc_hash": "2aa1cc63a079d41dac679a652f4c5730dde4f0a58beb7aafeff87a474f74429e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_Activations_Activations.__init__.self.other.other": {"doc_hash": "9f650e586c21947d5c241114bcbf47514fd00e862b0e54260aad33b020cac84f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_Activations.__call___Activations.__call__.return.img": {"doc_hash": "28facdae50c4b4370cd9b8c45187dd88352b36fcedc945e8f4e1af82b353201b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_AsDiscrete_AsDiscrete.__init__.self.logit_thresh.logit_thresh": {"doc_hash": "1e17d5d7d7235745ee5fbc5d8c38e3050a16a5cccd4352cdb139651c2724d7e4"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_AsDiscrete.__call___AsDiscrete.__call__.return.img_float_": {"doc_hash": "d29b187b9fe6b8de0e915ea82026bc43ce59b784d84e2418bcb1866c663a6b96"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_KeepLargestConnectedComponent_KeepLargestConnectedComponent._": {"doc_hash": "37e85d7c3a997e9f9cb9cefed5a97d05cb084a1c301aa50b3f5329f5d262f276"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_KeepLargestConnectedComponent.__init___KeepLargestConnectedComponent.__init__.self.connectivity.connectivity": {"doc_hash": "b8295cbf9ff9312bc3afc3b43ed5ae633a07afbbe23432fc03d447990eebc35a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_KeepLargestConnectedComponent.__call___KeepLargestConnectedComponent.__call__.return.output": {"doc_hash": "13d908937a6d2ae49ba51e8c5fb7b1bf458a61f3dce2fa1a33a953bb7e910f6a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_LabelToContour_LabelToContour.__init__.self.kernel_type.kernel_type": {"doc_hash": "de5ecc0b7185aec4b08236336c5f81f0ff2377afcb6febb090b95cec1ebdbdae"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_LabelToContour.__call___LabelToContour.__call__.return.contour_img": {"doc_hash": "39bbebc071a029d0beb5949182bd12b8b9d8ae2de0a5ed4580611dc3e4cadbd9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_MeanEnsemble_MeanEnsemble.__init__.self.weights.torch_as_tensor_weights_": {"doc_hash": "8e3eb320591a5e30a48819a2b6b45c190cc3e640f4bb202c84f8f2324fbd9772"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_Activationsd_Activationsd.__call__.return.d": {"doc_hash": "ab74c92c922e1b59776026681bb74166a53f7fe8c1d78c38da18b695659a8cb9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_KeepLargestConnectedComponentd_KeepLargestConnectedComponentd.__call__.return.d": {"doc_hash": "1c4275e230981a16d377857db85931ebc2c382f43c9f364221794d8f779f7da9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_LabelToContourd_LabelToContourd.__call__.return.d": {"doc_hash": "c6314e2e9ee5cfc04b1246d3c1476620f1e0708ba5c611675f427afa1deec574"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/__init__.py__": {"doc_hash": "d0d3ad5174a0e484e778a51a7be7261e800006dcbf11cc0d8b40ca762ab4c65e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Spacing_Spacing.__init__.self.dtype.dtype": {"doc_hash": "9fe28adb451a860e44aeec2d790637bce5b0726ffbc6d3d41db97ff3f5d9356c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Orientation_Orientation.__init__.self.labels.labels": {"doc_hash": "3178c69b74047cc9ea274a61460fe02e0959eed353a98bd680de3e2cf07e2b08"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Orientation.__call___Orientation.__call__.return.data_array_affine_new_a": {"doc_hash": "328d35ec6ad36e6186366db3293877550c4c399d531549a05953928ac0d9d228"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Resize_Resize.__init__.self.align_corners.align_corners": {"doc_hash": "f692b94b9b5f56bc7c0cec3fbaab39b13abb31abf0d8c857ec066a8a68d56fe8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Zoom_Zoom.__init__.self.keep_size.keep_size": {"doc_hash": "169159026ecf8a7bf415fb691e9f473ffe10b88bc6fafcb2e19a6e872c0e3969"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Zoom.__call___Zoom.__call__.return.zoomed_tuple_slice_vec_": {"doc_hash": "d1dd92193989415c4733803ef04653c06f33de5d7db3514cb2510cee98237164"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandRotate90_RandRotate90.__call__.return.rotator_img_": {"doc_hash": "59b830682c6075e525e8a7c96664a9b8b34af32480358279403ca0805de39d76"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandFlip_RandFlip.__call__.return.self_flipper_img_": {"doc_hash": "f568e9085323517b6c8449bcf242b16df7e649488af006ee2106bed59671e3ac"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandDeformGrid_RandDeformGrid.randomize.self.rand_mag.self_R_uniform_self_magni": {"doc_hash": "6b7383e56be31faf4058b4080ea2143dccf60e4e9199fccb4474c86a820b0040"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandDeformGrid.__call___RandDeformGrid.__call__.return.control_grid": {"doc_hash": "03338cc7a706663acf0e1ce3c16f89dab2a5c28eb1a7837b3d405ca9bca513e6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Resample_Resample.__init__.self.device.device": {"doc_hash": "fb4154af16cd0e96fb20fd79c8ae9c1f2666ee506818b4fa2e803f3851c9230a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Affine_Affine.__init__.self.padding_mode.GridSamplePadMode_padding": {"doc_hash": "68b2a429a7178600d37e560e72cc95fafcdb744630a3e1ad24aed9a88bda062b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandAffine.set_random_state_RandAffine.__call__.return.self_resampler_": {"doc_hash": "571e39ae73a98b8260cc88a0542375961bf5dfd5da134c52ffe266e945a4fa0b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand2DElastic.set_random_state_Rand2DElastic.randomize.self_rand_affine_grid_ran": {"doc_hash": "df175e42a5099f5531e4473add263bd26ac0736d43e78e81b88ce00ea5d96db0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand2DElastic.__call___Rand2DElastic.__call__.return.self_resampler_img_grid_": {"doc_hash": "705de2f279ba0ab96d847a0ac9a12931ba92812905848acd33d603f5c982dd96"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand3DElastic_Rand3DElastic.__init__.self.sigma.1_0": {"doc_hash": "d23c402e54d1f138dc8780780918f645b38fbecf05d5e96b8e91f2d8f2ef5b09"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand3DElastic.set_random_state_Rand3DElastic.randomize.self_rand_affine_grid_ran": {"doc_hash": "bf214188d828a5dabc282054a808c100b0841e5a9a636e85a2d442c7b665d73f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand3DElastic.__call___": {"doc_hash": "167d8661838767ef3f70f2a70a95417b5fa474d59216b7e06e3383975a345acd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Spacingd.__call___Spacingd.__call__.return.d": {"doc_hash": "468d4ae6f488058b789ca215fd5da40c7dbc764c65550a1203fbafbe07f3e6ae"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rotate90d_Rotate90d.__call__.return.d": {"doc_hash": "65bc9ec96563902ad38430e9496d1480a76ee650d856da9bdb5e1019e1bbee1a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotate90d_RandRotate90d.__call__.return.d": {"doc_hash": "c6e8cc8e64921852a29f391b54d7578551a9eb3530657db6885b1b6b5a80ecce"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandAffined_RandAffined.__init__.self.padding_mode.ensure_tuple_rep_padding_": {"doc_hash": "1593e2334595e116d5e917aea1957d739556c9f842f5cf1c45fd9deb002d69c9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandAffined.set_random_state_RandAffined.__call__.return.d": {"doc_hash": "29630154351b4a0a0da3561f603ad22295123ea1aba173e16b8c14286cc3bce1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rand2DElasticd_Rand2DElasticd.__init__.self.padding_mode.ensure_tuple_rep_padding_": {"doc_hash": "d1671a6dfa02be2f68cca410e493bfb58d5830c42fef9e433ddf230bd1139f4a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rand2DElasticd.set_random_state_Rand2DElasticd.__call__.return.d": {"doc_hash": "85f32954f7f3893eb2ccb2a10ac2a5e9d61a1120ca488cc3176f13c0d7f4e175"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rand3DElasticd_Rand3DElasticd.__init__.self.padding_mode.ensure_tuple_rep_padding_": {"doc_hash": "32e7eb4d1a70e08f07f83807c4d4fbfdc6581551a170801b46d0575a7b176dfc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rand3DElasticd.set_random_state_Rand3DElasticd.__call__.return.d": {"doc_hash": "273d17c5c5956bfeaeb12afe93c7b6ec9f8a35f3a8b4cffa1932e890b2cdfad7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Flipd_Flipd.__call__.return.d": {"doc_hash": "138116c8f1d1f14cdb5cf4ed8bffd32245fcf2352d35769b328a8d9b6f68f9ca"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandFlipd_RandFlipd.__call__.return.d": {"doc_hash": "9140da8e12a2a6b2c1c5369ed8dbbbbb2734394d3a084297181532f523625c23"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotated_RandRotated._": {"doc_hash": "b4b719a46eb03a6caa852e8f99c71d3a50f5a4383c17147ff7b1e056511b3f0a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotated.__init___RandRotated.randomize.self.z.self_R_uniform_low_self_r": {"doc_hash": "a1338d10cf8c17cb90256c7fe6c66d820f7a26e83cdaa8508ef0fe50ea6c0b1c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotated.__call___RandRotated.__call__.return.d": {"doc_hash": "18065b894180a5ed22da2f6f4e29a4229539ab70b4be19662ef28e8d981b3861"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_SpacingD_": {"doc_hash": "71677b5ee07ca6a2544b6a5bf67cf70aaf8a5626cd7ef1b59537bf0a2176a120"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/__init__.py__": {"doc_hash": "cd2a264a38cdcbe3032b67d2af4c4c91d246a1a09f44c26ab577c069fc11f107"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_AsChannelFirst_AsChannelFirst.__call__.return.np_moveaxis_img_self_cha": {"doc_hash": "34e30eb7930a04d4ea190b5ae34e4c059d89fd3f8abfb1fd337371d2be5df37d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_AsChannelLast_AsChannelLast.__call__.return.np_moveaxis_img_self_cha": {"doc_hash": "0c5c143f4f0e7de9290e258d220c2dba428fbb5c1d76fe25278983235d1129b5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_AddChannel_AddChannel.__call__.return.img_None_": {"doc_hash": "0c6f963e11939609f9578dbf40923081bf4d4ae9198a78588a800948e20e2208"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_RepeatChannel_RepeatChannel.__call__.return.np_repeat_img_self_repea": {"doc_hash": "32a5d69441f48fb80fd6cac188431278f8b94cdbb4864870b7080552dfab7b4a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_DataStats_DataStats.__init__.if_logger_handler_is_not_.self__logger_addHandler_l": {"doc_hash": "c16974260b9e32f33e910ecdede43500ff49a5cd197635dc778ebcce01dd9c73"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_DataStats.__call___DataStats.__call__.return.img": {"doc_hash": "d7fcde15ad768ea765dcb6833b1f3a323b03c90f71b3bb0a48b964a8be526bd4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_SimulateDelay_SimulateDelay.__call__.return.img": {"doc_hash": "558f5209af5505403bf6b4874f6653d427e1dd35476bbaed2e2f35fd91874cda"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_LabelToMask_LabelToMask.__init__.self.merge_channels.merge_channels": {"doc_hash": "e22438c1c5edcf802e3afc7b42a02e00ac0f0fc4e01e933c375e1647620bc374"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_Identityd_Identityd.__call__.return.d": {"doc_hash": "237748e487fa4afd9146e174d6e668e51e4c63a0050aae026c184e1f3e5bda50"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_AsChannelFirstd_AsChannelFirstd.__call__.return.d": {"doc_hash": "6a18a99698a40b5e04815a2eec72d697c7a81a8f3b754c81673d93e2184901ba"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_AsChannelLastd_AsChannelLastd.__call__.return.d": {"doc_hash": "1d3f4b1bc0f4e1809e4f2ba090886fc898ee48829cf68d281c011851632f44fc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_AddChanneld_AddChanneld.__call__.return.d": {"doc_hash": "473ffd9f7c3dfbd3f7ccf5f4a5a0bd773adc08e244c3c8eeccbbb3b1daba3aa3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_RepeatChanneld_RepeatChanneld.__call__.return.d": {"doc_hash": "7cf208746c8aab1a057381acb55075cd2f4068e1237fb06e1a841a83a8897530"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_CastToTyped_CastToTyped.__call__.return.d": {"doc_hash": "ef4a5cbf2904c516c90c0a1b3240c148c58c71c926ec0e044d466c390d570142"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_ToNumpyd_ToNumpyd.__call__.return.d": {"doc_hash": "9ee43ca625b519c7c1d6617e883290c6c765d8ac162b4cbb56dc2dcd1522c494"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_SqueezeDimd_SqueezeDimd.__call__.return.d": {"doc_hash": "eebbe6bee51e655c4ac460fb164b4383071eb48a26f62961ca8d7906b3c2edbb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_SimulateDelayd_SimulateDelayd.__call__.return.d": {"doc_hash": "7dc0f26e250d26bbd7fc9310553513372b0e43eb288b047d26f414961b2bdda1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_ConcatItemsd_ConcatItemsd.__init__.self.dim.dim": {"doc_hash": "48c1ec96d2489c1f189b872f807915ee3e03fa92542b01b23fa204c620cb2c94"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_ConcatItemsd.__call___ConcatItemsd.__call__.return.d": {"doc_hash": "1b1953a69926bcbe2e8cd8799d0024b2a9367eda13c21080442cfdcddaa35fe9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_Lambdad_Lambdad.__call__.return.d": {"doc_hash": "a5f744a268b539836fbb4863b2d23766b8df282cb16c6d29cefa54b4670cf666"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_LabelToMaskd_LabelToMaskd.__call__.return.d": 
{"doc_hash": "19d0968ad3dd34d5b5b49a0cf2975b8cf42c7b43a34e1b10ccee23d16a053237"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_IdentityD_": {"doc_hash": "25ef7078f37d7a75f5705321647050252ec8167ab140bbd8b72e974805cc9840"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_rescale_array_rescale_array._rescale_by_minv_and_max": {"doc_hash": "db2c2fe08a3d0ff92b75e2d47d4fed07c9859ecea70f8ef81748b9c10c6fbf54"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_copypaste_arrays_copypaste_arrays.return.tuple_srcslices_tuple_d": {"doc_hash": "088aa389124e3e91df7ac7c6fc4564a57fc471763b3c5be169de0b94348cc545"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_grid_create_grid.return.np_concatenate_coords_n": {"doc_hash": "6172980984d8f9734391d27b37a543e14770357727c0f89f7eb7ef4495cc15eb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_control_grid_create_control_grid.return.create_grid_grid_shape_s": {"doc_hash": "c114daf815e4459bc7a520c138ffb2c326050fb6af7f5538b5eb796a8cdcc741"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_shear_create_shear.raise_NotImplementedError": {"doc_hash": "ddfdb5a89c386f984574e99920e60d1ec0452be9384b10f292d1c5171657796f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_generate_spatial_bounding_box_generate_spatial_bounding_box.return.box_start_box_end": {"doc_hash": "5c733678cb22be010184cdf61dc22671fd34d57a6b27be5b86d51b47e6601ceb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/aliases.py_resolve_name_": {"doc_hash": "c886a779bb1200bd53741b80841b9d2e62b03f68e939c82559d3cf49c778aa87"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_fall_back_tuple_fall_back_tuple.return.tuple_use_the_default": {"doc_hash": "bbe2c521d04567fa3488db7632d783383a77fef20b231bf9b88d77f6dfbcce50"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_is_scalar_tensor_get_seed.return._seed": {"doc_hash": "267070099ca2888d0a6ed9e5878524cf56b45578b992a7209dc3b8191f4cc7f9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/module.py_optional_import_optional_import.msg.descriptor_format_actual_": {"doc_hash": "b0f7d583b9b3c97db5d3fa9b6e832f2dcb781a490804f91a3260c7c5879a1748"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_make_animated_gif_summary_make_animated_gif_summary.return.summary_op": {"doc_hash": "f172785606ddd4fa777955f68d7c65e71732ff5346d938a7c87058b16fc98a2c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_add_animated_gif_add_animated_gif.writer__get_file_writer_": {"doc_hash": "4ffb703c2c5878b671111fd5da6dc7c6651d6990afe32132d4edf0eead8d3ce6"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_add_animated_gif_no_channels_add_animated_gif_no_channels.writer__get_file_writer_": {"doc_hash": "642422e93e99cc0911e407f9908ef978df8f60514cfd4075593b1a8a8a4cc14f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_plot_2d_or_3d_image_": {"doc_hash": "eb25b3e6b579a8c14343d29164814013a1256f501680d40cc6504af00d29f8c9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/__init__.py_sys_": {"doc_hash": "b37e807e6619a0b2f7a1a6c198cb34607f180a2a1e1eff536484320d1aeb1c7e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_activationsd.py_unittest_TEST_CASE_3._": {"doc_hash": "088ae3f00a9e3b3b9dd501f13d21fadf477531944c282f388f30994240c431af"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_activationsd.py_TestActivationsd_": {"doc_hash": "67f6691b77885e4a4a65492867810315cc1719295169b1e21d877c53f7f0e911"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adaptors.py_TestAdaptors.test_multi_in_single_out_TestAdaptors.test_multi_in_single_out.None_2.self_assertEqual_dres_lb": {"doc_hash": "26d9b3b8be0daebadcf5424eac739a9ea8adbd7f6688d5578fae19455c0f020a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adaptors.py_TestAdaptors.test_default_arg_single_out_TestAdaptors.test_dict_out.self_assertEqual_dres_b_": {"doc_hash": "a7c80dc0ab29bee80025d720f5933e32498f91a2c43c75a795393efc9c108ce7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adaptors.py_TestApplyAlias_": {"doc_hash": "fe6ddfa1c4a52a69452f45cc189680caf8d9500502b2a34a5878b3dadf1be28c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_add_channeld.py_unittest_": {"doc_hash": "c7d40e184132e887c18244c6946c91240fa75ced95b4ec7fee88d49f779a24b6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adjust_contrast.py_unittest_": {"doc_hash": "8057367f6dd6b93d7f8409562201f4c2e83dc61e92b613a90cbb4663d533641c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adjust_contrastd.py_unittest_": {"doc_hash": "9eb87071c8a78da6e13358350193ca074813c4b86a4173a881780e2172f9623e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine.py_unittest_TEST_CASES._": {"doc_hash": "572005c2d477f31d63231dbecd6384a335cf574610f3f539cac8ddcf52c01189"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine.py_TestAffine_": {"doc_hash": "ae1177984e6db7f4fd0f5e3e2d75d02dbf0aabd72e96f68728047cca80509ce1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_grid.py_unittest_TEST_CASES._": {"doc_hash": "926fda71ae58d810dc1e04aff32c0b845c9fbb712d563fb2787d918bc7742a1b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_grid.py_TestAffineGrid_": {"doc_hash": "f03f73944d020c373006a58934ea4f4de8040bc016f64adc73df2bea081a0b40"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_unittest_TEST_NORM_CASES._": {"doc_hash": "e48ba93f6040f4c15a4c29ebef42c1bdbaf96b35cbdd15ffba51e11745d4971b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TEST_TO_NORM_AFFINE_CASES_TEST_ILL_TO_NORM_AFFINE_CASES._": {"doc_hash": "d2072f01c545fe04b826e7add2f571a82f0416f0b4f40eb0ecfd175ec009b121"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestNormTransform_TestNormTransform.test_norm_xform.if_torch_cuda_is_availabl.np_testing_assert_allclos": {"doc_hash": "a98f83ec098f4478992746b00185f13b1f17fa78a31a29b1b76f51afc66f2c24"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestToNormAffine_TestToNormAffine.test_to_norm_affine.if_torch_cuda_is_availabl.np_testing_assert_allclos": {"doc_hash": "18f1da9302c7fb91a0579b12380816103c9c407e93b76c4385c0ddf548466786"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform_TestAffineTransform.test_affine_shift.np_testing_assert_allclos": {"doc_hash": "0d13680d8b1963bb5bc445a9f58f2cd608fe1023be26daa44253f643f702c03d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_shift_1_TestAffineTransform.test_affine_shift_1.np_testing_assert_allclos": {"doc_hash": "9fd7e5f09bb8175cf7dc6277e6a8b859985250d35ff5162ad1e5aa2d151c6e37"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_shift_2_TestAffineTransform.test_affine_shift_2.np_testing_assert_allclos": {"doc_hash": "26928cfa9b219e220b25e44c5041f40e35569cc210ea9997ccbf34582bf85224"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_zoom_TestAffineTransform.test_zoom.np_testing_assert_allclos": {"doc_hash": "df6289faedb5b48725e734f93866586c19aa1182733b6bce27d36ce9c08735ab"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_zoom_1_TestAffineTransform.test_zoom_1.np_testing_assert_allclos": {"doc_hash": "6d4a36371f4558b530c1c47b7a841869a65251f9f49a25babaf8d4cddfa4e392"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_zoom_2_TestAffineTransform.test_zoom_2.np_testing_assert_allclos": {"doc_hash": "ed8ba16e6d72cb7c4c1cc8502d541fff0a0515c4f8c48a709dacdd19a17e8378"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_transform_minimum_TestAffineTransform.test_affine_transform_minimum.np_testing_assert_allclos": {"doc_hash": "7dd10709bd2be20fd48863b8d3e1c1e019d5ab0c7b7e6c461d42b6fbde8dc73c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_transform_2d_TestAffineTransform.test_affine_transform_2d.if_torch_cuda_is_availabl.np_testing_assert_allclos": {"doc_hash": 
"80946492e237689743519042f79e4a6f507693a9116f37683c3d2f68bed1ebf4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_transform_3d_TestAffineTransform.test_affine_transform_3d.np_testing_assert_allclos": {"doc_hash": "16f06eed852264cb5622ee2ec8510813c52c69ddccaa54be638ffcbf060672bb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_transform_3d.if_torch_cuda_is_availabl_TestAffineTransform.test_affine_transform_3d.if_torch_cuda_is_availabl.np_testing_assert_allclos": {"doc_hash": "ebf9515b1856a7e103b39450878cca2c01d0e625148e91d059803cf3434ea975"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_ill_affine_transform_TestAffineTransform.test_ill_affine_transform.None_3.xform_image_affine_": {"doc_hash": "ecd12f0dea315af2aa6ec18b7973d9041a48525fb8ba07bc36d11b8d13939e53"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_ill_affine_transform.with_self_assertRaises_Ru_TestAffineTransform.test_ill_affine_transform.None_6.out.AffineTransform_1_2_i": {"doc_hash": "043551c328cddd108fa5afe0c3db7742ef664e59652e15103c38f1ae5100e406"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_forward_2d_TestAffineTransform.test_forward_2d.None_5": {"doc_hash": "8573b6534153052ece55147242c140b0b00fa7e8bc03bf1c4673244b82170eac"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_forward_3d_": {"doc_hash": "b85c8b4b8709076acf578f156a94a57f25d70882c98e650ad6d8250793672093"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_first.py_unittest_": {"doc_hash": "785f89049dd335efcca6f3f20a161ca73933542b9d93bc9cce2d29bed6a9afe6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_firstd.py_unittest_TEST_CASE_3._keys_image_labe": {"doc_hash": "361ad8e307d5d6dc0cf2d6e324f43ac2f5620ec17ad5d7dc2eeed1d36f19a5a1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_firstd.py_TestAsChannelFirstd_": {"doc_hash": "2b89d109c1aafa6733ef45fb646787251cb73ca2b10d37ff652f1129817ef71a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_last.py_unittest_": {"doc_hash": "fb4153dfa933bc110bea8dab911e4197b0d460271c13ec08aea15753aa171308"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_lastd.py_unittest_TEST_CASE_3._keys_image_labe": {"doc_hash": "7c61bff95e70624885022f8eac0c43134ef7096db8a1273dc4f16405852e4b63"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_lastd.py_TestAsChannelLastd_": {"doc_hash": "962451c788fafde0a7bc3e58a57a092f74de02f3398788d37d736fb166960455"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_discrete.py_unittest_": {"doc_hash": 
"95241b9f889b4c1d90ca0b0bc8c34a7f24fecf3da2477477b4efe4a0cb3a57b7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_discreted.py_unittest_TEST_CASE_3._": {"doc_hash": "11b4ab0410cbca9b5c40e46198cea229113b23bf4b8717f21adc74dbe23efa23"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_discreted.py_TestAsDiscreted_": {"doc_hash": "de2ec18a5d07391757b4b05cc51556a6c364bce8f49e736ebeb223bcad9aae1e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_border_pad.py_unittest_": {"doc_hash": "318e26b3378c951f5058dedff77a4ded60f775c58c5451782503231bea21607b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_border_padd.py_unittest_": {"doc_hash": "ec5796d6d76576a97ed81e27495eba61cf0d904f1d5b90eff5f27f396c14c4ce"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cachedataset_parallel.py_TestCacheDatasetParallel_": {"doc_hash": "d56f8d2a5d87da5d35ba32249d3c86db36b4e804996df4ca388811d62a62887e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cast_to_type.py_unittest_": {"doc_hash": "3874d5c19e9850acb445cb056296c09d1ade19ee2a0d11cbf196a5c8cfacf40b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cast_to_typed.py_unittest_": {"doc_hash": "c17f465a9a9eac1ecde64ddc987aa41f0a51218c13a956756350316b9b5678f7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_center_spatial_crop.py_TestCenterSpatialCrop_": {"doc_hash": "e584aab1f2cbf4dae47abe5a5f7dfbe608db10be5cc43adc296db379a5e475ba"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_center_spatial_cropd.py_unittest_TEST_CASE_2._": {"doc_hash": "d6175485275484de30cab6749b06283530d7b0c8366732b43174e3986f2fd52d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_center_spatial_cropd.py_TestCenterSpatialCropd_": {"doc_hash": "a101ec3afb45d5d85417ee12a8e999e0fcc132ad494fbfe6c1373dec33820f30"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_TestCompose.test_list_dict_compose_TestCompose.test_list_dict_compose.for_item_in_value_.self_assertDictEqual_item": {"doc_hash": "8cdc84a619cf2134542693c7da7be654537901a01d4a59a667174c4f8339c137"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_meandice.py_TestComputeMeanDice_TestComputeMeanDice._DiceMetric_class_tests": {"doc_hash": "89b709a41303fa55a4bd23c248f0a4a78d388223ec597e3ac835132a39690e10"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_meandice.py_TestComputeMeanDice.test_value_class_": {"doc_hash": "651ae00aa693b4c54ddca04c349b43d93d449ed7ea4f827abb1fcb555b44e16e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_concat_itemsd.py_unittest_TestConcatItemsd.test_tensor_values.None_2": {"doc_hash": "b6c73a1584293328beb8b757ea5e89eced653a5dd8a38cd3a37d1778c88dfd8e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_TestResidualUnit2D_": {"doc_hash": 
"61d334d445fcbec195a3fa5260191ad7b8d6bcdd63fe629e30a31a18165a22d7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_copy_itemsd.py_unittest_TEST_CASE_4._img_seg_2_img": {"doc_hash": "ecaf59ec4f9cb9653b11bf1c83f6c29a22a7b6581aacd8c2e92b59f275450971"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_copy_itemsd.py_TestCopyItemsd_TestCopyItemsd.test_numpy_values.np_testing_assert_allclos": {"doc_hash": "98ac23f1c3bf3edb9a7c9dfa56216e4cd6853abcbe31e8f31a6819a206ad6125"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_copy_itemsd.py_TestCopyItemsd.test_tensor_values_TestCopyItemsd.test_tensor_values.None_2": {"doc_hash": "0bcf889c029cf3ca3a1cde9f1e34bcccc00802356dfe59fdc09a187227a5714c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_unittest_TestCreateGrid.test_create_grid.g_13.create_grid_2_2_2_sp": {"doc_hash": "ac06c6470e643ebf94887df1d7d1e92248157ba75663cfd1611f78f0ea22cd7d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateGrid.test_create_grid.expected_14_TestCreateGrid.test_create_grid.None_7": {"doc_hash": "ea0cf37951d799c88b511d843711b4fdfd9d925b43c22f8f278ff07855525048"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateGrid.test_create_control_grid_TestCreateGrid.test_create_control_grid.g_6.create_control_grid_2_0_": {"doc_hash": "3353897fdbce19ee7ce4cdea3442b970465c8b388da6580878a68bc556c393f6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateGrid.test_create_control_grid.expected_7_TestCreateGrid.test_create_control_grid.g_8.create_control_grid_1_0_": {"doc_hash": "19d8fc6fa8f2fc207a37e551845b9569a73028b4dd26ed064276c08a3273bf7f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateGrid.test_create_control_grid.expected_9_test_assert.np_testing_assert_allclos": {"doc_hash": "f1feb896d9162bf61ab9a1e63d28925d79e2883956aca351e6e3cca4ed2ed624"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateAffine_TestCreateAffine.test_create_rotate.None_4": {"doc_hash": "6f971b4c7f60022496d6b65ef4b860ff8bc2999dbc46781090cca0ad9fb2c047"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateAffine.test_create_shear_TestCreateAffine.test_create_shear.test_assert_": {"doc_hash": "a9800b20bd42f70e6d35e87b20f5691f2dec77e176c0ed06b06346523e07ad63"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateAffine.test_create_scale_TestCreateAffine.test_create_scale.None_4": {"doc_hash": "69f6e71beb32818100b2f0415fa5e225d0f38117628fcb2cbc1088c1fde52e26"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateAffine.test_create_translate_": {"doc_hash": "c87d2eeb72911fda802f3dae80f61cdea1a37c3552d910d2e11602f5435ef889"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crop_foregroundd.py_unittest_TEST_CASE_3._": {"doc_hash": "89b6cc98f0374c86af15607073e706b298ac5dfd77ee87774f8fa486d7f4da02"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_data_stats.py_TestDataStats_": {"doc_hash": "9ec461cbd016f0dab16874e852b5529b9561981b19af03193500dacaf00c96f5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_data_statsd.py_TestDataStatsd_": {"doc_hash": "12a0281ee0602041fcf9500de2b1550eb4f47850e8b5b4bc32ed57fd9dab5d4a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_densenet.py_TestDENSENET_": {"doc_hash": "4d7dfd992b7b79bf6cac4ae8079ee835dd5690a37a68cc2758f23db67ec83310"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_loss.py_unittest_TEST_CASES": {"doc_hash": "ce0949719e3c1ef9a4c680359bd150badb798e46b365efdaa4b04279adbcc87b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_loss.py_TestDiceLoss_TestDiceLoss.test_ill_opts.None_2.DiceLoss_reduction_None_": {"doc_hash": "e070729c0da0b4f75f62a28196fe0a3d3a2b55845e31c5e624bab89e885e03bc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_loss.py_TestDiceLoss.test_input_warnings_": {"doc_hash": "73dd553a6c5dc13e2180082650c71393570edf09609bb0bb03f4d6070539305c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_divisible_pad.py_unittest_": {"doc_hash": "3fb883fbc498bf828fe679601f9d241362f79e33b52cd5bd58372fdbede6920e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_divisible_padd.py_unittest_": {"doc_hash": "398ea5bde6843254c7b8819dcc193aec47c610aa8524532052d719decc57ef65"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_download_and_extract.py_os_": {"doc_hash": "0226004ac75a22703dda908ccc9562cb59d4439143f75fcb65b69813a87c4709"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_downsample_block.py_unittest_": {"doc_hash": "409030bd0092ab157750fb407cccc111f3f8417175c8b5004142e11732976534"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_flip.py_unittest_": {"doc_hash": "ef17ec39f5d39709b5f2f49ddac5fbfe17b52828b613c5dd44b33273d71d9d48"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_flipd.py_unittest_": {"doc_hash": "d8ed9bf49934300df754482ad7928c7adb9ff90535b324911153169081141e4e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_unittest_TestFocalLoss.test_consistency_with_cross_entropy_2d.self_assertAlmostEqual_ma": {"doc_hash": "2cab290d00399aa400dcda62fa45c112dcd5d9344fbd61ae45cce80f9650c264"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_consistency_with_cross_entropy_classification_TestFocalLoss.test_consistency_with_cross_entropy_classification.self_assertAlmostEqual_ma": {"doc_hash": "f2916eb7a9fc7ff22a655d29e6348c398372e91cc119cb9061e32ca14add0479"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_bin_seg_2d_TestFocalLoss.test_bin_seg_2d.self_assertAlmostEqual_fo": {"doc_hash": "f7bb0edb30d74d606a67d19246af08bed1a79d2a13b1ee52c7ab0fc4041c072a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_empty_class_2d_TestFocalLoss.test_empty_class_2d.self_assertAlmostEqual_fo": {"doc_hash": "c1cac849ae6497164941655af67d88e083343a51fa75e139182eff6e12ca05a8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_multi_class_seg_2d_TestFocalLoss.test_multi_class_seg_2d.None_1": {"doc_hash": "674ddeae33276de78b1fd6dfb4eaedf922cfaa80a5aebdf67a4f1f6163885c72"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_bin_seg_3d_TestFocalLoss.test_bin_seg_3d.None_1": {"doc_hash": "9958803627dc718739da3a2f9b5be0cbe6561931aae6737c3c2c601bc05bf65c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_GaussianFilterTestCase.test_2d_GaussianFilterTestCase.test_2d.if_torch_cuda_is_availabl.np_testing_assert_allclos": {"doc_hash": "89826ed9d6618f87e42c2d0b69cbfe1434420e0b542ea0a1525debdce19945d5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_GaussianFilterTestCase.test_3d_GaussianFilterTestCase.test_3d.np_testing_assert_allclos": {"doc_hash": "b78212f0f7160dc9395d0be926b476d4f36018f12dd6085b74adfacdbe95ea6e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_GaussianFilterTestCase.test_3d_sigmas_": {"doc_hash": "6583018c2fc411905c9b3a00e8eb0925a00ee13ffd0521f60713d9397eddd59a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_dice_loss.py_unittest_TEST_CASES": {"doc_hash": "d1cfae62dc4ea6168fd01e7c1c8d51666e6a96705deeb055afc2bd860bf8301e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_dice_loss.py_TestGeneralizedDiceLoss_TestGeneralizedDiceLoss.test_ill_shape.with_self_assertRaisesReg.loss_forward_torch_ones_": {"doc_hash": "9a92420756e9c35982f141d11453e3919b3e75fe4f5eb6d2cfb54ea4a923d7ef"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_dice_loss.py_TestGeneralizedDiceLoss.test_ill_opts_TestGeneralizedDiceLoss.test_ill_opts.None_2.GeneralizedDiceLoss_reduc": {"doc_hash": "8f314f22055cf461cd867706152abf8a774bb5e5fd2edc3b016a1411b66fd35e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_dice_loss.py_TestGeneralizedDiceLoss.test_input_warnings_": {"doc_hash": "848a1e54917479d0a6bd7f02af1e980c859dde94ed13fda168e2460a6477bf9a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generate_pos_neg_label_crop_centers.py_unittest_": {"doc_hash": "e2d1f678e9b20f674a596c36e03cb5a73f4d21fc9d83e656d532fb55559ac1a4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generate_spatial_bounding_box.py_unittest_": {"doc_hash": 
"eebc71b4904b81298fe3ea9368c4c9254a479e6f7d201619ad38df201f1c8843"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_rocauc.py_unittest_": {"doc_hash": "3b3cd959299298855f07d07d75c2eb77e8ec776ac5d51eb1724434eb17fdf115"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_segmentation_saver.py_TestHandlerSegmentationSaver.test_save_resized_content_": {"doc_hash": "0ebb83f8042c09dc7b65521f22dbd79e23efb0307d1705ae088127005141d2f8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_stats.py_TestHandlerStats.test_loss_print_TestHandlerStats.test_loss_print.for_idx_line_in_enumerat.if_grep_match_line_.if_idx_in_1_2_3_6_7_.self_assertTrue_has_key_w": {"doc_hash": "4c215efd41667901389a4aec0b492aab2855cf3cc00d5bd26edf0511a9c3edbd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_stats.py_TestHandlerStats.test_loss_dict_TestHandlerStats.test_loss_dict.for_idx_line_in_enumerat.if_grep_match_line_.if_idx_in_1_2_3_6_7_.self_assertTrue_has_key_w": {"doc_hash": "2ca20bfa4a7d967123ec17a41ef8f3c1a107ed293dffde092d5d72585e255a5c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_stats.py_TestHandlerStats.test_loss_file_": {"doc_hash": "fdb2d1adcb0beab4d572afbcc2a9f5cd38ce9440c1d3ea895b95e3218effd946"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_tb_image.py_glob_TEST_CASES._20_20_2_20_20_": {"doc_hash": "0dfb8e2714c725a80b9f3d9d26bc6a18a3d6cf2d9b1b399833c7f5b5d589663a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_tb_image.py_TestHandlerTBImage_": {"doc_hash": "0b7d80d88199e61b37d0fdaffe836ac6e04f95732929e2db29173e5ec1a84d8b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_tb_stats.py_TestHandlerTBStats.test_metrics_writer_": {"doc_hash": "8640ee9559d7f821c3736a2c8f595d6f30ac7832460627a0d2ac2218112eba18"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_validation.py_unittest_": {"doc_hash": "5f750b1c9e387cd2b494f32115c87e1cb217b25ba0ae3081afe64af8851ef001"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_header_correct.py_unittest_TestCorrection.test_correct.np_testing_assert_allclos": {"doc_hash": "93cbe20008aa25c06a1dfbcb094fcdb63a67f15378e3ff27521e2995a172eaa8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_header_correct.py_TestCorrection.test_affine_": {"doc_hash": "be2df905652f61cdf637b4262366af9a9265c10cf48b01a888366a5ff673f3c2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_identity.py_unittest_": {"doc_hash": "19a40849fdb212627b01e4884f3f41b37961bba4e063ee5a58d909197621eb2c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_identityd.py_unittest_": {"doc_hash": "908502b8cd973d8e67ba2922c2dc616fa47c802886e7c582e4547d8cf08cad52"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_classification_2d.py_os_MedNISTDataset.__getitem__.return.self_transforms_self_imag": 
{"doc_hash": "d85fa052340b61a27f863648483b410f0f76311641d24a0962c7e8597286f6d6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_classification_2d.py_run_inference_test_run_inference_test.return.tps": {"doc_hash": "11e6a2431bb36bffd83ae5755709b2eba436d88e332553f34160c7179fd1014d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_classification_2d.py_IntegrationClassification2D.tearDown_": {"doc_hash": "28d01e7cb655565530ff7f3fdffea8a3b93f8876765338a69ba236d177a08fb3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_determinism.py_unittest_run_test._TestBatch.__len__.return.train_steps": {"doc_hash": "6903bc550e80d6085839481e84e6f818aae53eb4e3eb382ddd8b155577900a14"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_determinism.py_run_test.net_run_test.return.epoch_loss_step": {"doc_hash": "5a22cf5eb4b83be6ab6078d2bfb7398a69f9f1d2d136cc43d2dcc6ad0aae1919"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_determinism.py_TestDeterminism_": {"doc_hash": "aaf04d13ebf07838e050dae17a8376289ef014e8194474d6e79f7ee73db41fd3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_segmentation_3d.py_run_inference_test_run_inference_test.return.metric": {"doc_hash": "25a7b421b93f694e569bbbf97201cbdf4ad9482922eaf4f95a9948ab922d2e9f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_segmentation_3d.py_IntegrationSegmentation3D_IntegrationSegmentation3D.tearDown.shutil_rmtree_self_data_d": {"doc_hash": "71e2b724a0a95c923296ee182e41cd42fec17f687f83328626c175e03528cbe2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_sliding_window.py_run_test_run_test.return.saved_name": {"doc_hash": "e24be6f56e349b80a6d92f9a56e0c92c4891d39359ca97db8f52cf64a5314bab"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_stn.py_from___future___import_pr_STNBenchmark.forward.return.self_stn_x_": {"doc_hash": "cba2c60f6ed6c557f598ef09da2edf352c377d34f818f4732862b10761001987"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_stn.py_compare_2d_compare_2d.return.model_img_a_detach_cpu": {"doc_hash": "1e344d42b8989a3de9f8aa2921be275c1a9cbabe8ade1de2607859ae605267d8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_stn.py_TestSpatialTransformerCore_": {"doc_hash": "a374cc357da01805ac8f85cf57dd8314ea7b94678b050f37e8ee33f290e84613"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_run_inference_test_run_inference_test.return.evaluator_state_best_metr": {"doc_hash": "ae4a341a0bc29a3a19d3d7cf15b591d066c1cd27180c072cdf4876f6606108ad"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_IntegrationWorkflows_IntegrationWorkflows.tearDown.shutil_rmtree_self_data_d": {"doc_hash": "6005549b64ee9a35d61f57722ebea1c8741f7c1da38c68962fd870138e30af81"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_IntegrationWorkflows.test_training_": {"doc_hash": "8acfab30549f4c5e9c2b60b06fe57458ee53c50964615221afb36660c46e9303"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_unittest_grid_2.torch_tensor_0_0_0_": {"doc_hash": "00b89435a9a0893ddcf34dddf5050c1cefdc362963e6a1f700c4eae418661b6d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_grid_3_grid_3.torch_tensor_": {"doc_hash": "43a900334a7591ddf106a1dcff07dcb0999ae37a45150234add55123cafcb3a2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_1_TEST_CASE_6._": {"doc_hash": "dd7a7fb0a12daf4f2dff1d6d2d440fea33f199fe5ccc4afb4b21a7b2ea83fe3c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_7_TEST_CASE_10._": {"doc_hash": "579a09659049b28ee2e268c848e7162df7ff7e14c33f626e1ed14af5f6cc4afa"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_11_TEST_CASE_11._": {"doc_hash": "59cfd1af3b18c7e865388a321aa7ecec960cb5f1b0529f71a2f9bcc321228bbb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_12_TEST_CASE_12._": {"doc_hash": "318b0528a7b8406d19f984b82dccbc44c8e397c5babb756cc1e3c1ca979ed5f3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_13_TEST_CASE_13._": {"doc_hash": "41538a0ddb7f8515f6d4feb743ace6b8b0712058f056262fe818f771a670f802"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_14_TEST_CASE_14._": {"doc_hash": "9513d43143f430bb9a759b2f378f16e4c2fbff49541441dabace61ca9c9f6590"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_15_TEST_CASE_15._": {"doc_hash": "445e34b006220312acb9edfa8b111184cd09f8993aa69193e888d0b7cd0c2cb5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_VALID_CASES_INVALID_CASES._ITEST_CASE_1_ITEST_CASE": {"doc_hash": "0519f5ad54c2ea2d5ece0b70381d033f66dd72aaffa21299b6b5f51d580fab0f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TestKeepLargestConnectedComponent_": {"doc_hash": "9e6c86a0aa7ffcd46f7746d95651d7461db5c4ddd60470bfb3087e95e17516b9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_unittest_grid_2._": {"doc_hash": "a0faba58eb022c0b0605b92e4678c665d9729add21dd7e2c89331d9b89daf921"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_grid_3_grid_3._": {"doc_hash": "8be83d6d11b2fecfbd1f97df5deb682c957972cb3c1ecd98a13cc7d06accf761"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_1_TEST_CASE_6._": {"doc_hash": "0dadbf13d6eb2d5d50748951662fd7e4d5e5f0d276ca4e9cd586641e1af29069"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_7_TEST_CASE_10._": {"doc_hash": "c83925bddfd029418437d3e8e245fe809566fc13bafcda5eab58542743b6a981"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_11_TEST_CASE_11._": {"doc_hash": "3987923c9f58ad831e491757e4e6c773f8cf88b2221107f5eaecb5e2c33aa513"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_12_TEST_CASE_12._": {"doc_hash": "b5caaaecc4a8bdeef4cdb7477863d4c024f6c62d97ae060646932e749cc12498"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_13_TEST_CASE_13._": {"doc_hash": "b396745f71c868fda0f4fa32a45d1aa6a4a7703d4d5872ff48fca16bc6cb30ff"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_14_TEST_CASE_14._": {"doc_hash": "ef1e3541a6359e083ff7b8da42da43c858c2879e49851956e1764f3475b79930"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_15_TEST_CASE_15._": {"doc_hash": "8c0944548fd0b745300ddf4b0a2bedad009914c4093d2685ed99f1b07e6d884b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_VALID_CASES_INVALID_CASES._ITEST_CASE_1_ITEST_CASE": {"doc_hash": "a234bdb65f56c5b9b714c7d68d6b05cb0e552878960669a8dd2746dc148b6c3a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TestKeepLargestConnectedComponentd_": {"doc_hash": "27d81f14f7dafd5c3677602c878b4c36a140d41bfd58e1160d9886d340fb3ccd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contour.py_unittest_expected_output_for_cube": {"doc_hash": "bdf554f93bb83fd0a2d2d0b4148c563db39990c7fedf6333f57f932d7f24bb53"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contour.py_gen_fixed_cube_gen_fixed_cube.return.cube_expected_output_for": {"doc_hash": "7b629ffee4dea9214e5bb21e592ea2c80ce5f74ad1cbf4ae403793a35f273d67"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contour.py_gen_fixed_img_gen_fixed_img.return.img_expected_output_for_": {"doc_hash": "42bba26a5877e4380e0c4479c27d03f35595b2650150fe87e0199b853d2cedda"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contour.py_TestContour_": {"doc_hash": "4e020e583fafbb564ef9cd7aa8fe2611e2b429b04596f075239d15cc4fb69ae1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contourd.py_unittest_expected_output_for_cube": {"doc_hash": "2b38d605ec24002402dba51174973f26a531739fde7de4ed5ea40b5a6d6055c5"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contourd.py_gen_fixed_cube_gen_fixed_cube.return.cube_expected_output_for": {"doc_hash": "2fe8f8caab5a687da08db9474e398a2342806436299be8329f3779d8371d7777"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contourd.py_gen_fixed_img_gen_fixed_img.return.img_expected_output_for_": {"doc_hash": "13023f92ad319442886deb243730bdea8f650d04d49b690c0ed741ec2dea2ad2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contourd.py_TestContourd_": {"doc_hash": "7142618ad0c5e1af24197b61855ca9055b305b628893bfade4f4dc8e6271f868"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_mask.py_unittest_": {"doc_hash": "4fae35c910b35b22450badda3029e334c1d7ca609bd9778d04efb0ab54979731"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_maskd.py_unittest_": {"doc_hash": "959b8b79ce55d397d0aff4d4723e53ed873c8e3d47e09e2834bde39b24879563"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lambda.py_unittest_": {"doc_hash": "c079e68f62501b6f8350f9129fb4fc610dfa9e9ae5d57b2241d3aa02d6e84c32"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lambdad.py_unittest_": {"doc_hash": "7f96928b83777563c190cb66cc15cdb4688727cf83290ebd8ac3b1f2b52b5478"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_list_data_collate.py_unittest_": {"doc_hash": "ee4bd3ea12ddb291c3e6329a0f9d209af1fd1b6084f70acfcb5c09430c3f4291"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_os_TestLoadSpacingOrientation.test_load_spacingd.None_5": {"doc_hash": "e80bb6a82f1bb4a35400f7c64c1636e6235095f102d328be005d33c5015302c7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_rotate_TestLoadSpacingOrientation.test_load_spacingd_rotate.if_anatomical_not_in_fi.else_.np_testing_assert_allclos": {"doc_hash": "cf0ca0fb2c5b0e855bc6f004516507423ecd4caaaca2de6290d24f40a328d32b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_non_diag_TestLoadSpacingOrientation.test_load_spacingd_non_diag.np_testing_assert_allclos": {"doc_hash": "5f854c3909246763e5ba7c91795a918ea22efacad7d8d589b75418f86a6ad0d2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag_TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag.np_testing_assert_allclos": {"doc_hash": "f7d45d3b9b7b8f48697b5c8bcaaa36e33ae965191678fa536312639799bc6437"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag_ornt_TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag_ornt.np_testing_assert_allclos": {"doc_hash": "68782a8c95e5a274d97f06631f1d6252fc2c9dad0d7301b4fe15920ab4d967b6"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_non_diag_ornt_": {"doc_hash": "41a6db3d1d775c3ad24644103f89b2f753444d0cd254bd8da6dce00f483b8019"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_map_transform.py_unittest_": {"doc_hash": "f901906da37659b7479b6bf768654388dd0ebb06c6b533364c50308f6f53c13e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mask_intensity.py_unittest_": {"doc_hash": "e6bd3b539ae0630510d6fcfa1347dc8ce780482405701a7e2070c1c5c5d11193"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_dice_loss.py_unittest_TEST_CASES": {"doc_hash": "2e80b64a9e6a9c669eb5bb8627616809fa26ef5e806c43513769d296f1517190"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_dice_loss.py_TestDiceLoss_TestDiceLoss.test_ill_shape.with_self_assertRaisesReg.loss_forward_torch_ones_": {"doc_hash": "9a10dacd3c87ff66d71108cacce51c2b4550e0f9d993e2d0984cc193addc92f2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_dice_loss.py_TestDiceLoss.test_ill_opts_TestDiceLoss.test_ill_opts.None_2.MaskedDiceLoss_reduction_": {"doc_hash": "f6aae6ee3ce63654ce04fad035ef739434267de0272a457a66dc444f24a913f2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_dice_loss.py_TestDiceLoss.test_input_warnings_": {"doc_hash": "500897acc35cdf3c18f7c7a0a451e063d11614efc63e068c32a5ab617ab58057"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mean_ensemble.py_unittest_TEST_CASE_6._": {"doc_hash": "4e7d96f5f435be9fdabbf32f531956bad1371c84a562de508057528a44b4049e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mean_ensemble.py_TestMeanEnsemble_": {"doc_hash": "69731292576158a1c9597444dd2a9ba713218715791b79b0fbd74b80e6f25966"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mean_ensembled.py_unittest_TEST_CASE_6._": {"doc_hash": "1f5a02f3bea4a16d9b50b97aecb26e2a27b432adbc67fbf3b9a46d348d6f4360"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mean_ensembled.py_TestMeanEnsembled_": {"doc_hash": "b4fc3e366b4f3848105850b93143ed2c58e9d37520c45138add67b108f8aaa8a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_header_revise.py_unittest_": {"doc_hash": "afb68eff5e92578578f94d55e82df3d07ad17462db6717510a10e101ef1ec12b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_os_TEST_CASES._": {"doc_hash": "9711686cd689597778193395d3b793a3a38ee9e378173f85c6ea9341afbb766d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead_TestNiftiLoadRead.test_orientation.np_testing_assert_allclos": {"doc_hash": "cd719f32159ca305592ce60b2ca0507d9bc7293912dce10ba3d18aa499d2e2de"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_consistency_TestNiftiLoadRead.test_consistency.None_2.os_remove_test_image_": {"doc_hash": "978ad2ece8c0255a491f3f95091de0ed9544b0f99a0af08a8260132b19d8f6da"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_write_5d_": {"doc_hash": "d4a5629e6fd9055c4b2b1d01aeae4dc3a158947e1e7203005d920aea4eeb55fe"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensity.py_TestNormalizeIntensity_TestNormalizeIntensity.test_nonzero.np_testing_assert_allclos": {"doc_hash": "2dfbf6773bdfba19e4c9750ba1edb1f472ff04ca7a66d5350b9e1a061f79731f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensityd.py_unittest_TEST_CASE_3._": {"doc_hash": "f5dcbc567ad75c56700713b4cb1dd3e256d4c558a1a4a3cb534f0eefe422be8e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensityd.py_TestNormalizeIntensityd_TestNormalizeIntensityd.test_nonzero.np_testing_assert_allclos": {"doc_hash": "b65f7c110ab937796b2c57b3870d861252f94c9c3dd1cc56075f1267935c0fd7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensityd.py_TestNormalizeIntensityd.test_channel_wise_": {"doc_hash": "9d765f853ceaae45ddfc396ef6c6594b003bf648e128753a58185adf34f11654"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optional_import.py_unittest_TestOptionalImport.test_import_wrong_number.None_2.print_my_module_randint_1": {"doc_hash": "dbf5690ee10001fab60702daa34aba0eb16ebf943892702f4b106a6350b54fc4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optional_import.py_TestOptionalImport.test_import_good_number_TestOptionalImport.test_import_good_number.None_5": {"doc_hash": "494cfd606891dafef2826956fdd356353733fd17772cf76086e2dbd3cea6de70"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optional_import.py_TestOptionalImport.test_import_exact_": {"doc_hash": "cddc54d3b868491642de3387187f1f4961d926776d5854aa8f54d00d6e5d14a6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientation.py_unittest_TEST_CASES._": {"doc_hash": "02b35793b4eb42358560f6925c758b07aaf2449514403a2c0f0c927e6427af81"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientation.py_ILL_CASES_": {"doc_hash": "c159930d350c92b014f3025488a10cd3b6ee349b43ed658461f7087c3d843038"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_unittest_TestOrientationdCase.test_orntd.self_assertEqual_code_": {"doc_hash": "948e8a4fdded8c51b27a00bb10efb2602fd0245cb9eb8b59a499188df2ba01e9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_TestOrientationdCase.test_orntd_3d_TestOrientationdCase.test_orntd_3d.None_3": {"doc_hash": "2dc638abd86325ec7716c6c769e1e0b873881955b51a06223a6707a24041e0e7"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_TestOrientationdCase.test_orntd_2d_TestOrientationdCase.test_orntd_2d.None_2": {"doc_hash": "1f1607a521975acca7ad39c517072ee7b3bd748cb61fc5f36ad602d5192f5bdc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_TestOrientationdCase.test_orntd_1d_TestOrientationdCase.test_orntd_1d.None_2": {"doc_hash": "9d9747d6879423925012b82fc555ca3abdd7e90d2ef3eeca8001055598e9cc74"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_TestOrientationdCase.test_orntd_canonical_": {"doc_hash": "e52a617e3c2212622f6e70d1747e023d0b65841490385f7793dbe634e4a51931"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_plot_2d_or_3d_image.py_glob_TEST_CASE_5._1_3_10_10_10_": {"doc_hash": "d9a1dae3663783b43e9341379ed38b38d8216b2f04ece15f88731e0cd8491647"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_plot_2d_or_3d_image.py_TestPlot2dOr3dImage_": {"doc_hash": "3cc600bd8e91cc696086617d4a64ca43e58f04b1aaf16028515a4c4d777f1b5a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_png_rw.py_os_": {"doc_hash": "1a9b73383e8e779a4d339aa10aaa6111432425b926f554225473202c29685cf1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_query_memory.py_unittest_": {"doc_hash": "08736eea0c19ab7279062412adc289aa4988081efbfe7db26ff2ad461b88543a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_adjust_contrast.py_unittest_": {"doc_hash": "adc4fa9caf249af05ec82ff93cacb15ef671e055205e2124e10c8afd625dd465"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_adjust_contrastd.py_unittest_": {"doc_hash": "7d98c4d49e281e353a5461ad0cd51ff274b1ca83562985699d54734aa26f78b4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affine.py_unittest_TEST_CASES._": {"doc_hash": "09f948405d827a7367cb360c46e539f33f78e9aba6c5e1b746e3e7b316b5f9a1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affine.py_TestRandAffine_": {"doc_hash": "4c4bd8baa3cd9c80135d0a35fee39a7281bcd5ff8c5afc5f98bf4f833cd7b37d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affine_grid.py_unittest_TEST_CASES": {"doc_hash": "fd73fb129123450edb1c817c266a334db20e54369a845996972c9eeb4fc40d70"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affine_grid.py_TestRandAffineGrid_": {"doc_hash": "04f756feba30fe6be44ea2cd9ce93796ce57ffac482fde39a92b549bb29241a6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affined.py_unittest_TEST_CASES._": {"doc_hash": "a6ea0e8b9aca435b631ea227a959522cc29336f6ee5d8db64fb073ed0bee9d4f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affined.py_TestRandAffined_": {"doc_hash": "acba5461d1796385ef97d69916a5a2ce97665942fb8d2d6a622b27a98d9e2ebe"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_crop_by_pos_neg_label.py_unittest_": {"doc_hash": "c13308dc4a5b6aaaf09b5259b4d8736f39f16c4280da803c447feeeb84b54783"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_crop_by_pos_neg_labeld.py_unittest_TEST_CASE_2._": {"doc_hash": "a77126c35eb9c5e56cae93fcbd2465846aa5ab54b557fb8e92e5679061240795"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_crop_by_pos_neg_labeld.py_TestRandCropByPosNegLabeld_": {"doc_hash": "e07bdb29a119148aa38b26d54b4e4d241420966e9e69898d42ca699492fe2e52"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_deform_grid.py_unittest_TEST_CASES": {"doc_hash": "9dc91ec1f4c8a87410de372907480b7c056cd80a4ac3e4ac577bf335c55f4736"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_deform_grid.py_TestRandDeformGrid_": {"doc_hash": "b0fc4ca641e4ad8ca50aa363691f88e4326cd3e6b0025f0ae4dc97901c66cc8a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elastic_2d.py_unittest_TEST_CASES._": {"doc_hash": "ad9cc3accd9a7c5366ac7b318296c5aa26adbd818a98ab87d48c73847a3f69df"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elastic_2d.py_TestRand2DElastic_": {"doc_hash": "490d19ac0def3368dac8512504fc6de2871a545207a0aab8bccbab1b6d8951ce"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elastic_3d.py_unittest_TEST_CASES._": {"doc_hash": "d59d8b1d0d98c6cc9ce627fed981002f4d094325f4c568726e08d3d4d85b8515"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elastic_3d.py_TestRand3DElastic_": {"doc_hash": "358a37488163fb179407aad89776bb897d1a4bab083699527bf331b6fd28dd7f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elasticd_2d.py_unittest_TEST_CASES._": {"doc_hash": "247b4bc4c5dbdda4105173a8e04df6e77e1190fee843fee8dc09030d8173ceb5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elasticd_2d.py_TestRand2DElasticd_": {"doc_hash": "36b55bf1a0bb5306c43ee619c2bea5bd41a00b6882ed3e3b8dfb63634b8a1342"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elasticd_3d.py_unittest_TEST_CASES._": {"doc_hash": "24613c569b3dca406fe1534cfb2aa5cc93a09d86ee45360921024282d1ce4a5b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elasticd_3d.py_TestRand3DElasticd_": {"doc_hash": "74e7119d98c5a247afdce1a48ec30aca8f5c72014ef1cf0a32c1fe82f3ec1e1f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_flip.py_unittest_": {"doc_hash": "fbefdd318d0d5ea0c37211a545ce669376bd49629d43caf1c1f536df980162f7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_flipd.py_unittest_": {"doc_hash": "1fe6c4f3dce8859cd752efb2fb739055acb68cbd6e62afaa1d462b7f430ab6d1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate.py_TestRandRotate3D_": {"doc_hash": 
"db9b2682f7f9b27c9dd142707a3c429ffcea3a462c94fadb7d350a2e0040bfb3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate90.py_unittest_": {"doc_hash": "8e3b36660cbe7053f51ed6cb1f0879f17651c85ec499a3a077a4490c9ec305b6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate90d.py_unittest_TestRandRotate90d.test_spatial_axes.self_assertTrue_np_allclo": {"doc_hash": "011aac8000127d76ccd695dfab568a03335cd767a66d3a2c297d91e70cc68dd8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate90d.py_TestRandRotate90d.test_prob_k_spatial_axes_": {"doc_hash": "8f95ba5c623c208d1967bcfc20cdf1fd3ef5e4ec3f1947f054575afedba40adb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotated.py_TestRandRotated3D_": {"doc_hash": "64b30b7f62579fafd21f1500182c74d08f280d8f8fa6bf241035eb226ff81e57"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_scale_intensity.py_unittest_": {"doc_hash": "604fabb0dbdcef750a5f04cd7a364c745cd3ec6e6b7cd1e80361ea5a1ae534ee"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_scale_intensityd.py_unittest_": {"doc_hash": "4f004802cdfb93fc20ba17b8144abdad714d55cc3c4e393bb5c42b82f0bad8a2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_shift_intensity.py_unittest_": {"doc_hash": "9515aa31b7dbc8ab539234d86d9b23bf504184a73b657e7bf684a20e9507eeee"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_shift_intensityd.py_unittest_": {"doc_hash": "521117df25849daaa8b78991df01a1f9e54c1f0f5fe452236468fbefe61e3454"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop.py_unittest_TEST_CASE_3._": {"doc_hash": "576e2d355739332c69b2ab15d12d6217b74f68b4892f204e64557c2cd30cf264"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop.py_TestRandSpatialCrop_": {"doc_hash": "1595b53f4bd1e222875f4387080fa9bbc1809c1fa0f31646210748fb9907dbb9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_cropd.py_unittest_TEST_CASE_3._": {"doc_hash": "1252f289f2b1fda2f603c629fa3d6d41b9dd7a63f0b1c781fdc4f67be6f6245c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_cropd.py_TestRandSpatialCropd_": {"doc_hash": "cf0a51b2676049d8b2c34daf2155b79b4724a018bfd57a2b83ee7fa3dd7a9b30"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoom.py_unittest_TestRandZoom.test_correct_results.np_testing_assert_allclos": {"doc_hash": "a6a7c36a0e0c527334ff2f83de5c869fcd0cfd921de20e5d975512b542b25044"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoom.py_TestRandZoom.test_keep_size_TestRandZoom.test_keep_size.None_2": {"doc_hash": "ef15738a6fd5d102b81d040d60454fee8f531a8385eaae98dd6966baa52688dd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoomd.py_unittest_TestRandZoomd.test_correct_results.np_testing_assert_allclos": 
{"doc_hash": "d096d5893421eb46695d79f7b96ae6437017d109538acc57312c34ccbc545a1e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_randomizable.py_unittest_": {"doc_hash": "1023cca277e63a4d91461a6f31fdf7423611faed373798baf4ec11e5743961a5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_repeat_channel.py_unittest_": {"doc_hash": "169acb328b139d0595a10a426ef1a0d9427a1d0673dd896e1c8508901549f9ec"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_repeat_channeld.py_unittest_": {"doc_hash": "20100ed0385a51b96c1e30d0c89b367e049c85a38bcb89d1c0804d7d9ddee587"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resampler.py_unittest_TEST_CASES._": {"doc_hash": "eb7d2e87d39ebd14dcdd54f91345796bd7b0233c0f7a07a08ed09dfeb716227b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resampler.py_TestResample_": {"doc_hash": "f4d82178d38d7e6aa71bcd457be26bba4cbc771adcada879d650b48da7d9d480"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resize.py_unittest_": {"doc_hash": "e3bbcfaacd776012b06d79b93914d865fdb1842a70eee59d59fa77a7f4ced263"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resized.py_unittest_": {"doc_hash": "fac48b9ce2a3168001eb3ec58df621541d29667bcd1b7ccca5797ff34321f11f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate.py_unittest_TEST_CASES_SHAPE_3D._": {"doc_hash": "2af266825b08364045f74ff4c753027e67be4efa13e958ad4864185c43263473"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate.py_TestRotate3D.test_correct_shape_": {"doc_hash": "34432312568c81bc903985c590dc4cafa9874c36101664aa7f252ee190ebcafd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate90.py_unittest_": {"doc_hash": "21920b2bd9bfe23970ebdbd4e8ace54ec5dbcb3316e677ca742095831a927084"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate90d.py_unittest_": {"doc_hash": "dd1614be1fb0395a2ad247bfd57cdd03ca5d8cf83d864841519ca562ba3b9ebb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotated.py_unittest_TEST_CASES_3D._": {"doc_hash": "36d37762f3e4279699351c432e543d987f7458968197bffc62281b699a192555"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotated.py_TestRotated2D_TestRotated2D.test_correct_results.self_assertLessEqual_np_c": {"doc_hash": "97d8e86b2d38df4ed3246fbbd9741052bca5d5007e5253753ac1493f40b74e91"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotated.py_TestRotated3D_TestRotated3D.test_correct_results.self_assertLessEqual_np_c": {"doc_hash": "f9aca93e5c5e9fad7bfb915d17c05174aeaff98a71910ec0cb230a3062687277"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotated.py_TestRotated3DXY_": {"doc_hash": "ea6d879215880bc135259cb537dbd928c8d378472beec79349e3b7567266a113"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity.py_unittest_": {"doc_hash": "080d6a596041731a7d1b0fbd1048e0f181a6d232bd4c1dc632e901f28f54f508"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range.py_unittest_": {"doc_hash": "c8f1655ece9b01b24c0d8e66e482057412559d78de298d46faf0252bc7fa1317"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentiles.py_unittest_TestScaleIntensityRangePercentiles.test_scaling.self_assertTrue_np_allclo": {"doc_hash": "a4cd0c3425abdb34e9ea522d1afe3440ee0d338e33f42eda402886ccd70e5125"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentiles.py_TestScaleIntensityRangePercentiles.test_relative_scaling_TestScaleIntensityRangePercentiles.test_relative_scaling.self_assertTrue_np_allclo": {"doc_hash": "a801ad2bd2d33c93938eaca152603f226919971788e17dd1f4b18784f7630f42"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentiles.py_TestScaleIntensityRangePercentiles.test_invalid_instantiation_": {"doc_hash": "f3da79e1032073fd0929835458ebc1ff1cc0df62c54e9d48016aab5a90d12d21"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentilesd.py_unittest_TestScaleIntensityRangePercentilesd.test_scaling.self_assertTrue_np_allclo": {"doc_hash": "577654f435cc6492c381b67df7741091055246acc9e99afe0e64e648cba83a80"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentilesd.py_TestScaleIntensityRangePercentilesd.test_relative_scaling_TestScaleIntensityRangePercentilesd.test_relative_scaling.self_assertTrue_np_allclo": {"doc_hash": "555c46fa1eb22984b64b4d26496d2c32e27334abf2a8dcc190d6f1ce0e63c4c7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentilesd.py_TestScaleIntensityRangePercentilesd.test_invalid_instantiation_": {"doc_hash": "801d0de8b2e7995e895305957a26df5369e8f158526470cf080982ca4820130f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_ranged.py_unittest_": {"doc_hash": "ff970a64f5c8d52d5f179e2ffc154419d989a6845eb137a8a5200567a9a278ed"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensityd.py_unittest_": {"doc_hash": "825cba8ed4d0d835d45f55d78750d3dce51817b35cf2de8894d8094474c29b03"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_se_block.py_unittest_for_type_1_in_.for_type_2_in_.TEST_CASES_3D_append_test": {"doc_hash": "6bfb7948ca22a671c068b9e2da3f6d3561748797af0d529e0b03a929e789316d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_se_block.py_TestSEBlockLayer_": {"doc_hash": "eedb8b4e108730515e2c945d40954a68ff7496cdd42b95a7d3bb9eb47de7031a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_seg_loss_integration.py_unittest_TEST_CASES._": {"doc_hash": "9ba26fe2202eb8150efa5e4ff85bd6db21b9a0394c68007d43127dc74f6bc3a5"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_seg_loss_integration.py_TestSegLossIntegration_TestSegLossIntegration.test_convergence._define_a_one_layer_mode": {"doc_hash": "6338926272160b572203cd69a9584312f54afa00763756b4bb3b9334a47291ce"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_seg_loss_integration.py_TestSegLossIntegration.test_convergence.OnelayerNet_TestSegLossIntegration.test_convergence.OnelayerNet.forward.return.x": {"doc_hash": "1a72a77faa759300292ee8cf695d55a639cf177a1b817134c618bcda85c83071"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_seg_loss_integration.py_TestSegLossIntegration.test_convergence._initialise_the_network_": {"doc_hash": "36055777668c0b15c149a120824729f062ab9ed67b4ea1ea9c1894f19e54fa2d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_set_determinism.py_unittest_": {"doc_hash": "ca1406e76e6f3a4efbf74f13b043ba086830c5196bb93f115e0dcd76a09f48f3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_shift_intensity.py_unittest_": {"doc_hash": "eafcfe0ffa84f851d775de4ce015593b3658e1062cb38273ba8b338da7b32ee4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_shift_intensityd.py_unittest_": {"doc_hash": "ce64070f3124c46bc0d833360c533654e7f9476efe6b1130a4ba71fe3893e752"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_simple_aspp.py_unittest_TEST_ILL_CASES._": {"doc_hash": "12b0c08db18ed5804a28d41bc8d3c2f555100e114236c08c7f2500953f882faa"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_simple_aspp.py_TestChannelSELayer_": {"doc_hash": "a8c17159c450b22f4eddf3264fb237157c6bf74f0458c44d8398124b0314d480"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacing.py_unittest_TEST_CASES": {"doc_hash": "a758e49290a0399571611e27bd97163e1c8a30bb080a579d82b2755302277bfe"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacing.py_TestSpacingCase_": {"doc_hash": "82c24da42e2802c05cf8e55c64c833a5bc905cc363a34061021f869c92f5efe9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacingd.py_unittest_TestSpacingDCase.test_spacingd_3d.None_2": {"doc_hash": "230a78eef6b7d175b2478a1b2e1421081c5c73d4b4cccf295aa039f26737fcf7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacingd.py_TestSpacingDCase.test_spacingd_2d_TestSpacingDCase.test_spacingd_2d.None_2": {"doc_hash": "bbc9929b7956ddbb74b7b21c283503a1f9be054cde0e7a7e60bf897d2bec8efa"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacingd.py_TestSpacingDCase.test_interp_all_TestSpacingDCase.test_interp_all.None_2": {"doc_hash": "c0801ae0c94f248ab047b47738603808d57a2620377a16f12d1f619f4ab4541b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacingd.py_TestSpacingDCase.test_interp_sep_": {"doc_hash": "4489bd03dcba1525560690554bdce03dc5201ca30f5f494f890f5e8b4d6c1438"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spatial_cropd.py_unittest_": {"doc_hash": "b2faabd5b98cb5730fec5ce6b5e67caecb77c24f834fd358b39ea0e885ac2bf3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spatial_pad.py_unittest_": {"doc_hash": "df61fac2fae4f7913a19c47ffa45edff34aaefbf57f4f14d2e414cbf327cd21d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spatial_padd.py_unittest_": {"doc_hash": "77f9041461dbf4785e5cc33b4dbc96295d600516e82b7581b8adee870ed9e01c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_split_channel.py_unittest_": {"doc_hash": "f160713cd61346a8b07c6a3dd5c15a78a390c84f93236bd76e4e615de0902fc4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_split_channeld.py_unittest_": {"doc_hash": "8f7f55fd5a5da968f92ae40f84617de5a1c409488a554c382e457cbfda72033a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_squeezedim.py_TestSqueezeDim_": {"doc_hash": "44c0af1d49b45a8b3285cc6683a2c87402fc928c0b289fa1530ec17e11d3e72a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_squeezedimd.py_unittest_TEST_CASE_6._": {"doc_hash": "23db653c6a59bcb945b0f48e7a2500836205a03aa5054c7273c9ddd60b5e01ad"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_squeezedimd.py_TestSqueezeDim_": {"doc_hash": "dafd8491cda6edaac7a92227fe4aecd6c1076bae4afb78d62def82fe1fb78b07"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_threshold_intensity.py_unittest_": {"doc_hash": "e79e4ba960fc33eff5c251dc18936ed295281ba9030c1e1ad9d61c45be09fee3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_threshold_intensityd.py_unittest_TEST_CASE_3._": {"doc_hash": "f9e2b7808eee0e25a6b0744d6b481e458dcedd1a643c312f0eba68f9a37d37cd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_threshold_intensityd.py_TestThresholdIntensityd_": {"doc_hash": "4d88f713215646861a00b981f571d1f9e441cd6b9291ea2c0f6ab765fa1c384e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_numpy.py_unittest_": {"doc_hash": "a9a5839de0f9e4b751fd746bce792f1cbeedabb7a98a96d19d7b14c21d4ee22f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_numpyd.py_unittest_": {"doc_hash": "fa305f5dc7e8f1c7ba6c219bf84e139a406ccfe41403966179e0fb8d7d499ca3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_onehot.py_unittest_TEST_CASE_4._no_channel_0D_batch": {"doc_hash": "f39b2da576e3c007e2302a2a9b08246184fc8a6578dd0be7ab14e7147ca683e7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_onehot.py_TestToOneHot_": {"doc_hash": "3a657d69562fd4cf3e3b3fbe7e82af9ad68eec651ee575f82ee848154ca1833c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_tversky_loss.py_TestTverskyLoss_TestTverskyLoss.test_ill_shape.None_2.TverskyLoss_reduction_Non": {"doc_hash": 
"263a13605a29548fa24916068acc28f94996c25c7f1793cb89669e3942e5c47d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_tversky_loss.py_TestTverskyLoss.test_input_warnings_": {"doc_hash": "c8ba71213081eb356cd4ea5c756dc02c59b0b3ce1ad1188a5fc61879b65dae03"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom.py_unittest_INVALID_CASES._None_None_bilinear": {"doc_hash": "01e77fd22239f60bb70e62fb607f2dae168d07a980bee5d6a5fe076444333483"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom.py_TestZoom_TestZoom.test_correct_results.np_testing_assert_allclos": {"doc_hash": "8ec0941bc7a4b43bfb23cd2a387e2745d0bfa29b11005c1b228ef165bf10f212"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom_affine.py_unittest_VALID_CASES._": {"doc_hash": "e0b96f6d1ed34b28ec0ecc8125906ca7f9b10c00c9abc59745bd16978337ceca"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom_affine.py_DIAGONAL_CASES_DIAGONAL_CASES._": {"doc_hash": "8a8bd01afc5596687781fded052e0366e694eefe3b112075d2272137934f9b1f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom_affine.py_TestZoomAffine_": {"doc_hash": "bb8fb4f6755eab54647c64234fccbc08a52816da71293d07c72478fb6997c520"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoomd.py_unittest_INVALID_CASES._no_zoom_None_bilin": {"doc_hash": "57196f15633094b2368b4f3ad13141c112982b85f03eade8333b3d7f191c85c2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoomd.py_TestZoomd_TestZoomd.test_correct_results.np_testing_assert_allclos": {"doc_hash": "227aed112b4f317ecf14b08af0fb68e3b15a0a489a44760e699a16ac11c9c555"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoomd.py_TestZoomd.test_keep_size_": {"doc_hash": "325c1caf645f717dd6d1ecc078fe56511effc2df291ef04b2e667deb1106338b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_make_nifti_image_make_nifti_image.return.image_name": {"doc_hash": "9aff81a921d77846bf73c9b99ed694af72e277eb8e585c6fead25386152f3002"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_NumpyImageTestCase2D_TorchImageTestCase2D.setUp.self.segn.torch_tensor_self_segn_": {"doc_hash": "588bffb4c26254305253faf110da2b324861d58a0cd1d1d4a3ec83679c9d378f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_query_memory_": {"doc_hash": "5d24842c5d28a29557ddfa9cda4d9d943d0bc26d9bc4d2e909524d6f4943924f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_config_from_root_get_config_from_root.return.cfg": {"doc_hash": "636bc04679b7c70a1fa18e7ac84773fc15772f0e8716aa4b69774539f08b0a97"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_NotThisMethod_register_vcs_handler.return.decorate": {"doc_hash": "37c56a26bfa3267b7ee4d7eb4b9f2825d576a46a924203ef084404ec204eb3a9"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_run_command_run_command.return.stdout_p_returncode": {"doc_hash": "686989e23abae5b809b4c7229d942fb392850c570e837808fe58e06eade68363"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_git_get_keywords_git_get_keywords.return.keywords": {"doc_hash": "24589a0695cc6db686b17707d2ae846bb00b32afb520696f4ba33bdcef540ce7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_git_versions_from_keywords_git_versions_from_keywords.return._": {"doc_hash": "8e8f53330ccc17d402918cf8352ba39b12d06b3ba72330525d7752cf18238233"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_git_pieces_from_vcs_git_pieces_from_vcs.return.pieces": {"doc_hash": "9340a2f5ae3766b743e79558e1c4892c28fe06a6b345fcd712d2a804f8368c77"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_do_vcs_install_do_vcs_install.run_command_GITS_add_": {"doc_hash": "ce6841ff519b17aa2d7473d5ad7ac0e07f12143a79c7f074763a9631c2fc7184"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_versions_from_parentdir_versions_from_parentdir.raise_NotThisMethod_root": {"doc_hash": "eb27147b4dca2b18020791f0e65c0fbf7aec6bd2532f6ea54436911b8190d4b1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_SHORT_VERSION_PY_versions_from_file.return.json_loads_mo_group_1_": {"doc_hash": "829ef4bec48493276727c09a38f1d04cb9a3919a539c4da1d239fb522ea32147"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_write_to_version_file_plus_or_dot.return._": {"doc_hash": "75973a5ef462074580ea01f4ad51787ab7d7bc39ff5eda566ef9f40d05aeda5d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_pep440_post_render_pep440_post.return.rendered": {"doc_hash": "118807a5c2a97d74d8fe5f796e9018ac18aadcc9b3182b6181ecd25e8dcbd0d7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_pep440_old_render_pep440_old.return.rendered": {"doc_hash": "97c87acb509ae5cbe63b80d324df13c6839f50c81c34d171697e57c312f2e180"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_git_describe_render_git_describe.return.rendered": {"doc_hash": "a3d9b39392875e0334af8c64b46fe88c38f8d51c8b168839e4ae228db30b0daf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_git_describe_long_render_git_describe_long.return.rendered": {"doc_hash": "cf56b90f4c7b7ac99479cdeafeebaf306a0ab42713bb536dffdfe85f0a8b1c00"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_VersioneerBadRootError._The_project_root_direc": {"doc_hash": "8830b95a2390f35b59b15edddd77bdcb0dbc6a910169bd3a9f57b3c382046551"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_versions_get_versions.return._": {"doc_hash": "8c9846352af5fa04f4ae6f00259417363a735b385dc9858b46939a847d6928f6"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_version_get_cmdclass.from_distutils_core_impor": {"doc_hash": "1c049e56b5b17ba505fccb46547c16d5952097f16336fccd2dccb2548aa9bd98"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_cmdclass.cmd_version_get_cmdclass.cmd_version.run.if_vers_error_.print_error_s_vers": {"doc_hash": "bf8482b590feaed74741d6cb9ac691b8bfd55f50f0717591038782e486f88099"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_cmdclass.cmd_sdist_get_cmdclass.return.cmds": {"doc_hash": "015db1e8482c2e175440634d93bd5281420a7f6cbc8ef3e10a0658e88e5f4a73"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_CONFIG_ERROR_INIT_PY_SNIPPET._": {"doc_hash": "d06ca46b62d1ae35674a46018618ee9c434fbfd9d1553dd0feba30be65ed1f96"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_do_setup_do_setup.return.0": {"doc_hash": "e8fcf2f3b4250714449e5c758c79747c92bef475be869d16721af41a42935536"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_scan_setup_py_": {"doc_hash": "e06f36e2fe2775a2e960e356ddf105fb556e2ab136048f23c6fde564ced2eb1a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/utils.py_extractall_extractall.if_filepath_endswith_zip.else_.raise_ValueError_Unsuppo": {"doc_hash": "664b5dbd589ab9234d275a653b25966eee072e9541999d8111e5a8a88f37c17d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/type_definitions.py_from_typing_import_Collec_": {"doc_hash": "7bdc114b362413bb49f3af72c63f0b30f4f081a5caabe3e8e82e4cb483ba3cae"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/csv_saver.py_csv_CSVSaver.__init__.self._data_index.0": {"doc_hash": "d0782d4cba5ffae0bc1e41f6dcc4a03486b5328ab916df62a64360202881c777"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/png_saver.py_from_typing_import_Dict__PNGSaver.__init__.self._data_index.0": {"doc_hash": "21050d83f335147113c5755dc89984f79be40212a781034eff07eb661b1f2ef4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/multi_gpu_supervised_trainer.py_from_typing_import_TYPE_C__default_eval_transform.return.y_pred_y": {"doc_hash": "cffb25c8c7c5af2d4b1b9853102bc7f6bf4e22a8803e9763252e504649c68d40"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_GanTrainer_GanTrainer._": {"doc_hash": "ea46783e20e47346751cedb4a2711117431f11eba3c0caa8c2d95a1f61bdc57f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_GanTrainer.__init___GanTrainer.__init__.self.g_update_latents.g_update_latents": {"doc_hash": "56c75f260b54bb3c274701180e986d3c8a201d23828436da536cec0e64b7d40f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_GanTrainer._iteration_": {"doc_hash": "34b4b81dbd06dce11bcefb63fed6700ab35e3f17c6fe1b51795287dafa639f06"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/utils.py_get_devices_spec_get_devices_spec.return.devices": {"doc_hash": "567853941fa9643c29488a407f1f4f788b2be2572878254bc5200ca306d585b9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/utils.py_default_prepare_batch_": {"doc_hash": "9e65e4ef0bd8eeb7976d2189e6a8088ae6907841f93124e6bfdd5fc6011f33e7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_loader.py_logging_if_TYPE_CHECKING_.else_.Engine___optional_impo": {"doc_hash": "8ad3439c7234be8ccc3e643e9a72309069a2c59da381ea82d8de573624299cb4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_loader.py_CheckpointLoader_CheckpointLoader.attach.engine_add_event_handler_": {"doc_hash": "154f03acc2e4da05f61071d7f73029eac111ff14fa62d6095dcf9dd8cf8acf3e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_loader.py_CheckpointLoader.__call___": {"doc_hash": "96cfc1aa0bb4c671e4f39470750cf74ab767306d7707f7e28bac74e56c4be734"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver.exception_raised_CheckpointSaver.exception_raised.raise_e": {"doc_hash": "9e5dc17235c979df033adea9608b7fe5ac00bfd17007b26e717375f4eed91514"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver.metrics_completed_": {"doc_hash": "6a673d3250d2e08ab72380df3b5f075a86630ba8f4ee7c243b6478d0f264a61e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/lr_schedule_handler.py_logging_if_TYPE_CHECKING_.else_.Engine___optional_impo": {"doc_hash": "f9e93e0cf7a56870636858f73b578209216cbb3f194d0dba1ced6819d722b059"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/roc_auc.py_ROCAUC.compute_": {"doc_hash": "d4fabffed96cecec7849d60c88abc7dea57232581436e4a66de0cdab5d69ac04"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/segmentation_saver.py_logging_if_TYPE_CHECKING_.else_.Engine___optional_impo": {"doc_hash": "fd4d8cb51c9faf5805b42e34848a57ef0fa12c5760645bcd27a9fe976b4583b4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_logging_DEFAULT_TAG._Loss_": {"doc_hash": "532ec08d5af5493a8ba8ddf0968a76647ac153cc33b76e2a5ff5f703b7ab2b8a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler.epoch_completed_StatsHandler.iteration_completed.if_self_iteration_print_l.else_.self__default_iteration_p": {"doc_hash": "872f4268df2dd0e53f5ebd3cd165cab11a81e51aafe97e5a94c1476e3e463381"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler.exception_raised_StatsHandler.exception_raised.raise_e": {"doc_hash": "4f77bd6c5794dff36021c30679355817996bc20524f3e80c9b6d108f57952bd5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_warnings_DEFAULT_TAG._Loss_": {"doc_hash": 
"60a0d7b55c4a53dfed52771f478b5bd2a93c6adc03cd7164ba5783569e4cba28"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_warnings_DiceLoss._": {"doc_hash": "36762ac2824be9dfb1266ed5e403f368a925f62a982f434385b5a24196565d5d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_MaskedDiceLoss_MaskedDiceLoss._": {"doc_hash": "d5cff4072b4bb2cb9eaf088e6f177765cf8828f43c1a1335a9e8d97ee7dfc2b8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_MaskedDiceLoss.forward_MaskedDiceLoss.forward.return.super_forward_input_inp": {"doc_hash": "c3cde431485dffca4b0c9a43eea1c71b61becd16637a1730f22b4e6118617add"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedDiceLoss.forward_GeneralizedDiceLoss.forward.return.f": {"doc_hash": "3a70c3944ded92fe0a5befe57f81877efd62790cf91425a75c52a5fbcd155de1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedWassersteinDiceLoss.wasserstein_distance_map_GeneralizedWassersteinDiceLoss.wasserstein_distance_map.return.wasserstein_map": {"doc_hash": "bb7b6e798da164fc8c46bf2aca6fffc70ae7cfadb1ee489c91bdd89341c10656"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/aspp.py_from_typing_import_Sequen_": {"doc_hash": "a284751cae3b5d10d80e9d84c51104f6f852f6c0df0e0f001ec543e0c43b2b5e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/convolutions.py_from_typing_import_Option_Convolution._": {"doc_hash": "790c0bff2bd7ab0c45e52c47adf75b226a735caf675a94b58f9aba7975b0e68e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/convolutions.py_ResidualUnit_ResidualUnit._": {"doc_hash": "282a03841315eb196d03eab67d3ddf7da2ba9e312622bb3f26d42f155422fc12"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/convolutions.py_ResidualUnit.__init___": {"doc_hash": "ddb9b219511ccaddea50053483e59e547a4572664ec189dfe946d303d992fe20"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_from_typing_import_Type_GCN.forward.return.x": {"doc_hash": "cc4bea2741cc4506fe65801c2939900ed7e2c45da1dfad8b4a1941b67fb21780"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_Refine_Refine.forward.return.out": {"doc_hash": "64fad06897eabc2b8c7e918554219367d7ea3d564d2e11d2c4ef7728c07b2fc7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_FCN_FCN.__init__.if_self_upsample_mode_.self.up_conv.UpSample_": {"doc_hash": "42a6a1a4ba7af8c8180bba2b0de22203725f3e88cc18aed4ca4a48faaba5f676"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_FCN.forward_FCN.forward.return.out": {"doc_hash": "e7355dc03778c29ae76c7b07e1712d2db6cb9b78540bb932c539cfacce526c89"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_MCFCN_": {"doc_hash": "bc8dbbe3d6a01ab3072c28efaa9e788c6508fb323304ed1f70724c264c816841"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/segresnet_block.py_ResBlock_": {"doc_hash": "840c8c347444adab86510f9e830623be83df748fd30f0f450ae583d4816f7d74"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_SEBottleneck_SEBottleneck.__init__.super_SEBottleneck_self_": {"doc_hash": "6676fcf0c6b446cde200f51f23efe0de813e11e8a20a02c5b97a1320836c83cb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_SEResNetBottleneck_SEResNetBottleneck.__init__.super_SEResNetBottleneck_": {"doc_hash": "8948988429b8627954706cc725af49b4fd257dd4e7dbdd52e7a4f9cf31cbf72b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_SEResNeXtBottleneck_": {"doc_hash": "24c0a56fd5530790f05d71877527aac2caaa4b7d3c6c38f34b15c5f0e69f2b13"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_calculate_out_shape_calculate_out_shape.return.out_shape_if_len_out_shap": {"doc_hash": "2f7fc082bde8ce84222c9bacfaf84095be9dce50c1bd38b24ba0bc9a0a777dcf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_LLTMFunction_LLTMFunction.backward.return.d_input_d_weights_d_bia": {"doc_hash": "b74aa2dd300073e30d9cbde4e3626c09843f96a400a6807b8848efc73e3a6eb6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_LLTM_": {"doc_hash": "8beb7974cd465385d9c664f2336ef6c6eda6ebb9d957e5f97bc229271d49a516"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_math_Bottleneck3x3x1.__init__.self.pool.pool_type_kernel_size_1_": {"doc_hash": "008e00645dbe48c03df5eb879a0b70dd74bacdee4be8c8db3808be90bf277b99"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Bottleneck3x3x1.forward_Bottleneck3x3x1.forward.return.out": {"doc_hash": "1b0102c31efcc220393815798521f4633a01db2295ae54b324867abe4ad03ab6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Projection_Projection.__init__.self_add_module_conv_c": {"doc_hash": "43e8c4c588e3a03608835d468b10f538c0d9ae07edd5dc973fcbd33a1968488c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_DenseBlock_DenseBlock.__init__.for_i_in_range_num_layers.self_add_module_denselay": {"doc_hash": "38205f4c566797c2266a3547869aa47f2d7606aa2a96716106711c26180574ca"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_UpTransition_UpTransition.__init__.if_upsample_mode_tran.else_.self_add_module_up_nn_": {"doc_hash": "2f2f2472e670878d82c93fd7c2fa93b313fd0dfd0b0831f0abc8d43d3e33c9f4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Final_Final.__init__.if_upsample_mode_tran.else_.self_add_module_up_nn_": {"doc_hash": "e09ac00eb6ea2344443bfb77643661f489fe4b25949a3dcc22cebf4c8112df7e"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Pseudo3DLayer_Pseudo3DLayer.__init__.self.dropout_prob.dropout_prob": {"doc_hash": "928f9cdfe5ef3f55f3adebedb0a470822ef0fb509731b92c53ec61500c043f8c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Pseudo3DLayer.forward_Pseudo3DLayer.forward.return.torch_cat_inx_new_featu": {"doc_hash": "aa405664fa773cee91a41b41763eb059f3803e2d418c9934bea9275d16b82221"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_PSP.forward_PSP.forward.return.x": {"doc_hash": "920eb23a4fd1172496239fad1262a432d0ea2a00d9ad5fb7af1c4763c3248d67"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet_AHNet._": {"doc_hash": "59a0772d963aa73ac5f34fe43af8bff683249b4e293dbd42da1d250a1219680e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet._make_layer_AHNet._make_layer.return.nn_Sequential_layers_": {"doc_hash": "1da5127fa74153fe5675d8e8cee36e8a865d5b8c1e2e11fdd2ae055e9989672c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet.forward_AHNet.forward.return.self_final_x_": {"doc_hash": "d18ccd430d83130a0cd494ebeb792524a3477ba4dc8b22b03af981797a72537d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/highresnet.py_from_typing_import_Dict__DEFAULT_LAYER_PARAMS_3D._": {"doc_hash": "748a6fcc0fbb77569d8389f06a4e675ae3795656b84448d574041c7ba184ff7f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/unet.py_from_typing_import_Sequen_UNet.__init__.self.model._create_block_in_channels": {"doc_hash": "843e0b741dd0e545ba34f31d2b50a6a2df0a3fd6245729d1972a3795cd46eadf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_from_typing_import_Dict__get_acti_layer.return.act_type_act_args_": {"doc_hash": "bed721fca00b2f6a71c611d525a215769faeb089dcd2ea9d9f48b4f3ae7c8576"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_LUConv__make_nconv.return.nn_Sequential_layers_": {"doc_hash": "6420e4fbe4d80e39eec59b322b9171029d98130f4a337c48868b2ec297039e0e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_InputTransition_InputTransition.forward.return.out": {"doc_hash": "a81c23ceddc3c38ddad13e6459c0d76afd8c9b3972d4df65ac195e994708eef1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_DownTransition_DownTransition.forward.return.out": {"doc_hash": "0dfc9931d799031c2b549adfc7466e30feff59fc6a429b65cd36d81a78fa69f5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_UpTransition_UpTransition.forward.return.out": {"doc_hash": "cc07d55c3b9cd0dde08e8b70eb25b92d367b37bcd53fad3a9a486f97ac5b5c24"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_OutputTransition_OutputTransition.forward.return.out": {"doc_hash": 
"6aeb84c428641e451538371951c3ebb6d775d84138f83cbc8f0e63cf1ed02be6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_VNet_VNet.__init__.self.out_tr.OutputTransition_spatial_": {"doc_hash": "d1f631f264265a18f706dcab86ad5c7d3f53f6e0e67fcbac046e30a0581f8994"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_VNet.forward_": {"doc_hash": "4768544f0ae8b416c2e6880dc36ca0d9911ea500695c4046139a7b10bbeb7d47"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_normal_init_normal_init.if_getattr_m_weight_N.elif_cname_find_BatchNor.nn_init_constant__m_bias_": {"doc_hash": "fe91485b445b2a78ae6a1252b156a3553b016e24e92d48a9d2512c26e59efdfa"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_icnr_init_icnr_init.conv_weight_data_copy__ke": {"doc_hash": "01bffed4129fbbde7349e2502ad3c5b1ce6628aa1867c38b2984d3d7a584ad7d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/adaptors.py_from_typing_import_Callab_adaptor.map_only_names.return._v_ditems_k_for_k_v_in": {"doc_hash": "4d29ec5de78e077b173ecd46a02af570734e4483db91e5dc9f5681ad03f1793f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandSpatialCropd.randomize_RandSpatialCropd.randomize.if_self_random_center_.self._slices._slice_None_get_rand": {"doc_hash": "2711dc8a2e9910df98f81540ca69b700e114d440230c9dc7e3d69dae05778618"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandSpatialCropd.__call___RandSpatialCropd.__call__.return.d": {"doc_hash": "a73cee00a073b6298786992e024a468fc959cd8a284105ebb2d933807615a2ed"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensity_ScaleIntensity.__init__.self.factor.factor": {"doc_hash": "e3d00697d2a11cb61b06222559122422489c843f2e0df4f4370b344aa2850f55"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_MaskIntensity_MaskIntensity.__init__.self.mask_data.mask_data": {"doc_hash": "0e5e8100f875cc6a06a45b4e25ce0e26bb67341af481a515440ef4a94b56c81c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_GaussianSmooth_GaussianSmooth.__call__.return.gaussian_filter_input_dat": {"doc_hash": "c03d05d3674076bc3db78d0246256598e8cdb8d5578fb8b168848a7a6c8a8100"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandGaussianSmooth_RandGaussianSmooth.__call__.return.GaussianSmooth_sigma_sigm": {"doc_hash": "3d6f86f85fdcfdc13f6675a28d83c759bbcd55760002bf98f6bed933cfd9588e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_GaussianSharpen.__call___GaussianSharpen.__call__.return._blurred_f_self_alpha_": {"doc_hash": "17762272adc36162ab374e6dad6c7217a982e93dec865ac36e1fa828e9a8183e"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandGaussianSharpen.randomize_RandGaussianSharpen.randomize.self.a.self_R_uniform_low_self_a": {"doc_hash": "43e07b61ca38ab55a66bdd560eab99dda4bb76fa8b7936a352c3ab62d5ee2234"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_GaussianSmoothd_GaussianSmoothd.__call__.return.d": {"doc_hash": "de3e57cbc3367b3a13d2808d0819a330a3529f7b6686d169203f49e12106992d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianSmoothd_RandGaussianSmoothd.__call__.return.d": {"doc_hash": "ae4ac9f89d6be72fb5c48cbfa584d1d269f8caf5c9369846f1e580fde35950ba"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_GaussianSharpend_GaussianSharpend.__call__.return.d": {"doc_hash": "6d3fff5832812830ad0dce51ae74ad8d9766baa9fd506f11099d8bb19ff931e0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianSharpend.randomize_RandGaussianSharpend.randomize.self.a.self_R_uniform_low_self_a": {"doc_hash": "0a9afe59800967bdd6bbfb8f2549a9ea21d04cf7cc0304d0e727e913a1c04c84"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianSharpend.__call___RandGaussianSharpend.__call__.return.d": {"doc_hash": "2af905e86ce0ef2a1c587dc472ef1e0479f494bf412af8d6f142ee5d1a7765a6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_MeanEnsemble.__call___MeanEnsemble.__call__.return.torch_mean_img__dim_0_": {"doc_hash": "c042e246bec273a952c3e6c7136b20b2949d84590a748d8039ac1363509cc042"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_VoteEnsemble_VoteEnsemble.__init__.self.num_classes.num_classes": {"doc_hash": "05d79fad8b2ac2e52d4ebb4de1ed7e116e48e0d9da24d868e80fe55ab7b7f71f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_Ensembled_Ensembled.__call__.return.d": {"doc_hash": "08c1649766c5957030de1b42f3e0a5c911851349305169b4b4b2595d09df15d0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_MeanEnsembled_MeanEnsembled.__init__.super___init___keys_en": {"doc_hash": "5452b6848214a0d2b04c5d2821690e9acb1933d605dfda54b28280df934666b9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandRotate_RandRotate.__init__.self.z.0_0": {"doc_hash": "c4ca9972fca69e467c9bae2ae118399359293f2854720adf80b340c8b4ff4005"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Spacingd_Spacingd._": {"doc_hash": "f6390e1b606884b9c145084fcb17d5eea0358885e2dab67fce1a3c88a7d5feaf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Spacingd.__init___Spacingd.__init__.self.meta_key_postfix.meta_key_postfix": {"doc_hash": "9233fc6eab6f8a9837457a05d6bfa968024b36c16396f0d06c5f2d5744428635"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Orientationd_Orientationd.__init__.self.meta_key_postfix.meta_key_postfix": {"doc_hash": "a9606f84656dc0e7db44c8ca5892d5838a38e7c9443c584749d457bcc4860dd4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Orientationd.__call___Orientationd.__call__.return.d": {"doc_hash": "4873b03b5eaf2374cf2fbb80df0c3602f953d069cf78b0ddbe3d2d0133a4e4a1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_logging_Identity.__call__.return.np_asanyarray_img_": {"doc_hash": "44fcb13f92d6a71225f4ecfb6708b23229b67dc6127bbc1158557658eceab364"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_Lambda_Lambda.__init__.self.func.func": {"doc_hash": "bfc418f114ad75439f76e887d6e3d36bbee2a4fd86219b49fa10c9076863e9e3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_copy_from_monai_utils_import_e": {"doc_hash": "2870ab2bed35b95dcc3b7dc68686be14f6555c60e04c6fc919e44d4fdb4732a4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_rotate_create_rotate.raise_ValueError_f_Unsupp": {"doc_hash": "dfbaeaec2e1efeee554198c80749e54a1beccf497c852b84c27291aba5dcc977"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/aliases.py_importlib_alias.return._outer": {"doc_hash": "8f904e998bb3e55dc7a762c1b99a780a941194b142f914438fb72366f9e2547e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_collections.abc_ensure_tuple_size.return.tuple_tup_dim_": {"doc_hash": "4cc4acb52193d12e8d277c32774e70e8ac0a1f1f1e4c2e9db4d47f64be8dc325"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_ensure_tuple_rep_ensure_tuple_rep.raise_ValueError_f_Sequen": {"doc_hash": "5757e4d9adae76c0faa59482f94741c61b3fd85e8f46d1cd6ed75c41c36392d0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py__image3_animated_gif__image3_animated_gif.return.Summary_value_image_summ": {"doc_hash": "5b92b495f9d57ca2f05bf8bc41c96155ea334ced1d136d263c399134c1ee46f3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adaptors.py_itertools_TestAdaptors.test_single_in_single_out.None_4": {"doc_hash": "4b24f84b2c5da28cbf0631f80106ca066879e35bb2864966145d9f6e1f63c056"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestToNormAffine.test_to_norm_affine_ill_TestToNormAffine.test_to_norm_affine_ill.with_self_assertRaises_Va.to_norm_affine_affine_sr": {"doc_hash": "860e5aab62199adb26a0d6d1c124577538fd42a1638331d3b1926697040472b3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_TestArrayDataset_TestArrayDataset.test_shape.with_tempfile_TemporaryDi.None_13": {"doc_hash": "b069cf6b9c0721930850269363811c970a1cd93b142512c511989199fc349239"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_TestArrayDataset.test_default_none_TestArrayDataset.test_default_none.with_tempfile_TemporaryDi.np_testing_assert_allclos": {"doc_hash": "3ed7e0f15f78a727944dda70b4973d70cb8e0bc45d783a0d46278d5a15bbc77e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_TestArrayDataset.test_dataloading_img_TestArrayDataset.test_dataloading_img.with_tempfile_TemporaryDi.None_6": {"doc_hash": "ceba66a5ad1b2b061f8af22f09991bfca74850ba05a4fe057b4925a39169eb4e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_TestArrayDataset.test_dataloading_img_label_": {"doc_hash": "6e68d8fde09cc0a532a88ec4a682d96f626d5e7fec562cc60961f2a2f6514766"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cachedataset.py_os_": {"doc_hash": "85b430b8c2484355201d7cb00eeb1b61e32e472dece011387f19864a1fa06054"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_roc_auc.py_unittest_TEST_CASE_7._": {"doc_hash": "548b681a5da1d37056fb96adebeec438db0bc9b9c1360d3a6648e8684e205171"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_csv_saver.py_csv_": {"doc_hash": "513d041e18f2f473405b42bbe36d44913f777fe38c4d399631e8be1bc826ab5e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_data_statsd.py_logging_TEST_CASE_6._": {"doc_hash": "9cbcda0f7e098909563933b0312842910ce19973518776a0ca056aaeda492464"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dataset.py_os_": {"doc_hash": "4bb870784cc034495532e0a382f3b3a9015a9af5135c36216ebb480f165aa3a0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_decathlondataset.py_os_": {"doc_hash": "366c001bea6d450075cd51c120e99a6cf4ffac41f08ad31da49e37e01b166421"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_delete_itemsd.py_sys_": {"doc_hash": "3f715daf127b0471980cc3c945ae04547e55d02ad62574471056717771cd9980"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ensemble_evaluator.py_unittest_": {"doc_hash": "29c8cedc04a177647de797a1001ca18375dbec7ef0629a6dd2482bdc25bbbaea"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_sharpen.py_unittest_": {"doc_hash": "91ac5a994289a03b03389950b2a713a91ccabf61e31cc941da9728bc59eefcfd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_sharpend.py_unittest_": {"doc_hash": "2f526b10984f772fa97abf1bf577caedf91b1788b3846ed787ce7dd69f0e90e3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_smooth.py_unittest_": {"doc_hash": "5ad301e736bf16dc1e71cfe8a0d05f7180bed3e236ac37ee3ee771dc1adfd3ac"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_smoothd.py_unittest_": {"doc_hash": "25e97ed6f7a9dc7db45d5100e02e1e8b31ef2d094140c0445a97b2b6250cc755"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_loader.py_TestHandlerCheckpointLoader.test_two_save_one_load_TestHandlerCheckpointLoader.test_two_save_one_load.with_tempfile_TemporaryDi.torch_testing_assert_allc": {"doc_hash": "3d7423719802826cd0159fb789a0c8d54934c448d73f28f8d46344a334e0b786"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_saver.py_TestHandlerCheckpointSaver_TestHandlerCheckpointSaver.test_file.with_tempfile_TemporaryDi.for_filename_in_filenames.self_assertTrue_os_path_e": {"doc_hash": "cdfcf86d801efe13e5ef80af8a43629a371a16d06b77138e9451accfdb56ce99"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_classification_saver.py_csv_": {"doc_hash": "458ff14118822fb2d8e5dd38f69460659abbe101ff955e6297bb0bc381f6ebad"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_lr_scheduler.py_logging_": {"doc_hash": "e65853e99b78a75edc28cbe231b7f99aaabc138f8bc9ddc6ca7fb63f6609ebcf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_segmentation_saver.py_os_TestHandlerSegmentationSaver.test_saved_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e": {"doc_hash": "6751cdbc7fc6e6f8003af9cda194b5f96730a17204dd42aacaf92d8924db4d4f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_stats.py_logging_TestHandlerStats.test_metrics_print.for_idx_line_in_enumerat.if_grep_match_line_.if_idx_in_5_10_.self_assertTrue_has_key_w": {"doc_hash": "45300a02213f4119f622f420606c829d0ce85d9c795b8f6365a5b3e30b654495"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_tb_stats.py_glob_TestHandlerTBStats.test_metrics_print.with_tempfile_TemporaryDi.self_assertTrue_len_glob_": {"doc_hash": "ea531bfc35eb40a9e30cb5ffbd63188859b1a7d3a8653bef2a1d1fb473ae75cd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_img2tensorboard.py_unittest_": {"doc_hash": "f33e08e5a858de42f9da71eb6ec1e4c4740ad154c35328f8f57927938f49c91a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows_gan.py_run_training_test_run_training_test.return.trainer_state": {"doc_hash": "d3eae47020312076e374364a8b3d137ac84da167eadde8d36575be8516f7ccae"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows_gan.py_IntegrationWorkflowsGAN_": {"doc_hash": "5f993f31ba5d9d568e066edbcedfe104affbd9d5914288412b82d2af151e5882"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_list_to_dict.py_unittest_": {"doc_hash": "9f5d4c471ac1aba7bce8e991b587d2814446ed4fca665753b69afa67f121a361"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lltm.py_unittest_TEST_CASE_1._": {"doc_hash": "cbc43f6b31061ed3f9eec3e5050bec2c408a5fc39720bd2e8610518be4356d5c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lltm.py_TestLLTM_TestLLTM.test_value.None_3": {"doc_hash": "76a8a69548e6a707e3c86c372d7da62eb762dbc8945609c5b2c8f7778f215dce"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lltm.py_TestLLTM.test_value_cuda_": {"doc_hash": "675808b8df4bc2325b5fab39c71cab1a859bc7d28ff5e9dd7e3831fd847483ee"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mednistdataset.py_os_": {"doc_hash": "9d8b0e4428fae1fc52c00c8afc7b32f12198339fc5a5f548b057e9dfbfa5b2f3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_write_2d_TestNiftiLoadRead.test_write_2d.with_tempfile_TemporaryDi.None_5": {"doc_hash": "2752947fc53c72ea49910bef82fdf1da559782610d49c712c3eab5ef418d088a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_write_3d_TestNiftiLoadRead.test_write_3d.with_tempfile_TemporaryDi.None_5": {"doc_hash": "292a1e0ec91df9bf9c2e7c6d7ab61e067e1514cd96b3c02360b66e0b77cd8844"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_write_4d_TestNiftiLoadRead.test_write_4d.with_tempfile_TemporaryDi.None_5": {"doc_hash": "b92183ed5c9f20b0bd0901a63bea750b629aec41a916df99eaa6913d9e9abf0e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_saver.py_os_TestNiftiSaver.test_saved_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e": {"doc_hash": "a6f5e1d06a7004b0b9c3e2d9ad0b9c5fbedc164683493161cbf3a5a32df10b17"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_saver.py_TestNiftiSaver.test_saved_resize_content_TestNiftiSaver.test_saved_resize_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e": {"doc_hash": "cb06752762a4b5c2beef2dd4fb8dd21cdfe1c65499a7f6e06d826e5d2f7d00c7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_persistentdataset.py_os_TEST_CASE_3._None_128_128_128_": {"doc_hash": "2c2c17b12e2deba5422b072a5b1eb41bcaa79bf8202fb05ad90b22788e7f86e1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_png_saver.py_os_TestPNGSaver.test_saved_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e": {"doc_hash": "d9e66d7bb6391b380c870ca5d1bf0c556d6db9ea20a2c49aca526e79db18982f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_png_saver.py_TestPNGSaver.test_saved_content_three_channel_TestPNGSaver.test_saved_content_three_channel.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e": {"doc_hash": "393a9f2830e02146c2b21b67b052b09c88abac405bcaeae37cce8e8805b29bfb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_sharpen.py_unittest_TEST_CASE_2._": {"doc_hash": "1b9204ac44f0049f56ef7903e016c5dd181c099c10457c6f801d4bde43354520"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_sharpen.py_TEST_CASE_3_": {"doc_hash": "b1a4c6668bdc721e0717782467277823be4c1ee51ba8702d81ecdf74665f22fd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_sharpend.py_unittest_TEST_CASE_2._": {"doc_hash": 
"e3a21708fc50a7cc61642a9a3f5ad1060665114a07bbba640eead24f2ac048cb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_sharpend.py_TEST_CASE_3_": {"doc_hash": "74795d736384c03dd923277185e77ee8592be6d2e5b12eec0cd2407953611a2f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_smooth.py_unittest_": {"doc_hash": "480324caf2ecfbecd12b0812cab0253865774d2ddbfeb0878cab6ef9dab50d0f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_segresnet_block.py_unittest_for_spatial_dims_in_range.for_in_channels_in_range_.for_kernel_size_in_1_3_.TEST_CASE_RESBLOCK_append": {"doc_hash": "e70f7cad46147380a16aeebd6f325558503ae747eca7d050c449501f50e5c87c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_segresnet_block.py_TestResBlock_": {"doc_hash": "215a7d9d851c4e3444ddecfa469628fde3eb59e0e2c88d98790b3f8214b18656"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_simulatedelay.py_time_": {"doc_hash": "b2ccf1f26142c91a873463983b2662ed184dbea8f739a20bb2dd0f23759546bb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_simulatedelayd.py_time_": {"doc_hash": "77af87875b52e1a337609b10d82adb559ed7c0925be2dee09691bb0f72be8d57"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_squeezedim.py_unittest_TEST_CASE_6._TypeError_dim_0_5_": {"doc_hash": "09055449c38d36386992a3251aded809b446b8a0cfa79ace6ae9e854a6fc1ec5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_subpixel_upsample.py_unittest_": {"doc_hash": "abae2e9ccff105b35e21e527432da1578d23ce0dbdc699717e3b295ad18838f6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_tversky_loss.py_unittest_TEST_CASES": {"doc_hash": "62e0c005f7aad5a5f7efe1c3b0084bc4680ee297d93587d49d4e8fbcede836b1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vnet.py_unittest_TEST_CASE_VNET_3D_3._": {"doc_hash": "4848281718d42d40c7cb34ac2705eb9c5998e59bfcdfd163e89e3da6ed818f8b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vnet.py_TestVNet_": {"doc_hash": "20d91bbe687993b77799581475e5f4e0ec3db634717e66d63f4beeb10aaca296"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vote_ensemble.py_unittest_TEST_CASE_5._": {"doc_hash": "a8e7b184578fbcfcf423dd125e381ac3c93eba26cf027ea41a7812ca0126524f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vote_ensemble.py_TestVoteEnsemble_": {"doc_hash": "a589eec99bc313d609d1bc0137d350d6050ca217e27e7e58fcb2908934bd9b7d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vote_ensembled.py_unittest_TEST_CASE_5._": {"doc_hash": "deee61d060ab7b2f04cf6893621ba1cece37c2eefbed28a99ba51bb2cc431fae"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vote_ensembled.py_TestVoteEnsembled_": {"doc_hash": "f03f3215db9623ba1ef2bcc0fd759f93dd2dc5f6e5fc578556094472336c9786"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/utils.py_check_hash_check_hash.return.True": {"doc_hash": "3a5d25031dbd99c6befce8a6d09280efcdf3d1047a108c4d5c02c110d7d8ca90"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/deviceconfig.py_os_get_config_values.return.output": {"doc_hash": "0c674f5023677fd4aba177ac5099e9628b7d3dd687032d933254e8bb5df9829d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/deviceconfig.py_get_optional_config_values_get_optional_config_values.return.output": {"doc_hash": "23cf4b85e768eff74b13817c0fcf12723d3a0f63d72bf0a06ef3957f0c24f379"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_SmartCacheDataset_SmartCacheDataset._": {"doc_hash": "c06ee9b013aa3f4adab17b3e4c3f0f5e23afe458962cf9722f4d98b5922397de"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_SmartCacheDataset.__init___SmartCacheDataset._restart.self__replace_mgr_start_": {"doc_hash": "fadb425a59d0c74c991e2093c0cc631dcac622db7ea00942ac5567025ba2b390"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_SmartCacheDataset.update_cache_SmartCacheDataset.__len__.return.self_cache_num": {"doc_hash": "b0e5f15faddfba23c190155243fa6ed929ebdc8dcf6d478df1ef9cda05248ca4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/decathlon_datalist.py__append_paths__append_paths.return.items": {"doc_hash": "57deccbc1be72231882cac92e321b961047c8795b3f5c8e99392e701b5e9ee19"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/decathlon_datalist.py_load_decathlon_datalist_load_decathlon_datalist.return._append_paths_base_dir_i": {"doc_hash": "e85271f57471c49f9a539af0bb00fc00de2441aa834fdf1a193c293b27c29be3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/decathlon_datalist.py_load_decathlon_properties_": {"doc_hash": "20e188836d07b184e2d42dde32d058cac8173c26727373ca2a891afb4c9a2c97"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/grid_dataset.py_PatchDataset_PatchDataset.__len__.return.len_self_data_self_sam": {"doc_hash": "dc0092d5f79e2cb1fc5ce0c5d3a55028cf61da552fc05ee81e43c4af17bfa54b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ImageReader_ImageReader.verify_suffix.raise_NotImplementedError": {"doc_hash": "86a83333edaabe217c67f8dfd769ad3a31fc4eb4cb81e6e66e740c5c214b110b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ImageReader.read_ImageReader.get_data.raise_NotImplementedError": {"doc_hash": "a905d2c815837259fd0c9878d277ca0198e2a76f2a3b1d4e836a17f4e456a012"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ITKReader.read_ITKReader.read.return.img__if_len_filenames_": {"doc_hash": "ec8a877d4e416f281489281e52b55341e7037835c3c2bc9fe6e65a83ea2b9f54"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ITKReader._get_meta_dict_ITKReader._get_meta_dict.return.meta_dict": 
{"doc_hash": "8e110afdd934fe1dd8c0774d045a8b20796f1f86a7962d0321619c5ba706cf1a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NibabelReader.read_NibabelReader.read.return.img__if_len_filenames_": {"doc_hash": "5638900635e95d5b341040ca7a7552652681f5c64d45817204607e4039783a49"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NumpyReader_NumpyReader.verify_suffix.return.is_supported_format_filen": {"doc_hash": "65649d4cc36639055c55bc3003b7b8db1b6aff97144641be3162bdee4cf47214"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NumpyReader.read_NumpyReader.read.return.img__if_len_img__1_els": {"doc_hash": "2baa55365d4d6c74b564e5aaa7903e1b6ec11b4ead4c45ed5d54ade82f773f5f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_PILReader.read_PILReader.read.return.img__if_len_filenames_": {"doc_hash": "7303a25969b18408ea94b878778e3e81de2ce3cc83b08d42670b9734d88b425a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_get_random_patch_get_random_patch.return.tuple_slice_mc_mc_ps_": {"doc_hash": "446a949aff51d98b033d9acd957f915cd3f5ca7832b70fb88ce6ce50d0246c83"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_compute_importance_map_compute_importance_map.return.importance_map": {"doc_hash": "ed59086d957c0330d61484cac12920c1f886747da0bf31a58472a1d2d8164206"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/smartcache_handler.py_from_typing_import_TYPE_C_": {"doc_hash": "f01c41cda6fba526400a87bac25f2187958fafaa3daf4fd1d2822b9e1e796e85"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/hausdorff_distance.py_compute_percent_hausdorff_distance_": {"doc_hash": "d62a18425109208b3ac2cb6078fe469723f6f841f11945051fb93bdf8722acb4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/utils.py_get_mask_edges_get_mask_edges.return._edges_pred_edges_gt_": {"doc_hash": "71b9fe6264e9bcf311f1def9c461949669d3e5895b13bef0760e6056d7ba9996"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/utils.py_get_surface_distance_": {"doc_hash": "fccc7121b66a3bafa40405f5d68d59b91941ff6a55ee0c28c69cf9dc646164f3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_from_typing_import_Dict__UnetResBlock.forward.return.out": {"doc_hash": "f1baac9efa56dd538f0acc8f0f81c488047ccc0bd98e34a52c9c130c564bfb9a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_UnetBasicBlock_UnetBasicBlock.forward.return.out": {"doc_hash": "d6e90afef49c72bab28e9a2048ebb602dfc94f522f4047e7207d82a8d481b07e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_UnetUpBlock_UnetUpBlock.forward.return.out": {"doc_hash": "9a8114d1e95b18180e9561511f1c4f2625553df8669bcb5a4081d64b4080efa5"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_UnetOutBlock_get_acti_layer.return.act_type_act_args_": {"doc_hash": "eea1e27c01b2d5cc58f796e1e6e06cd2f7db3f1c26816128e054290ae457e308"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_get_conv_layer_get_conv_layer.return.Convolution_": {"doc_hash": "48f2b6a876fce2e836ac759072cd8a1e6c7890c6c4bcc695a4c43bdc625036ef"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_get_padding_get_padding.return.padding_if_len_padding_": {"doc_hash": "b7cd787d4e7a0f9dec0321ddada05b3a291dd4f259fbead79702f938ccfab0ef"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_get_output_padding_": {"doc_hash": "9e18d678d131e16c193b6ba9bfbe53b7ca08a2402ebbbb4ed71b785be4b24224"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/segresnet_block.py_get_conv_layer_get_upsample_layer.return.UpSample_": {"doc_hash": "4bfd9bcdd198264af7809c27ff9225273db51137de12f71659ce853fcc0731ac"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_from_typing_import_Option_UpSample._": {"doc_hash": "ea61f348486eda36144abeb4598cda3bb1e2ef621c99713a923ff0c8d5626339"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_UpSample.__init___UpSample.__init__.if_up_mode_UpsampleMod.else_.raise_NotImplementedError": {"doc_hash": "645bcee45066a9da4138516293e90ea5ccffaeebc72e1792ec344ec7a6908d0b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_SubpixelUpsample_SubpixelUpsample._": {"doc_hash": "84a74811cb4e9ae35e3680bd2d029d43a15829751da8052c4317e51c150b7404"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_SubpixelUpsample.__init___SubpixelUpsample.__init__.if_apply_pad_pool_.self.pad_pool.nn_Sequential_": {"doc_hash": "958e7fdc4a6d3a5bdfce4930ed27fb058a7b4c739d39b14b2879db20c8eeba36"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_SubpixelUpsample.forward_": {"doc_hash": "f7a99ff63c3bc29942135027efaa64cef1f00498ad8839eba087f08f5c8779a2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_from_typing_import_List__same_padding.return.padding_if_len_padding_": {"doc_hash": "c283d6dca67b34f0f7fa52e9b016fb13f5ce5427a73d2491b55d5aa322e69c30"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_stride_minus_kernel_padding_stride_minus_kernel_padding.return.out_padding_if_len_out_pa": {"doc_hash": "06ff87118324485fcbe3e1a4231c8dacdf045131f33b1c8115708a3f8f3a853c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_gaussian_1d_gaussian_1d._type_ignore": {"doc_hash": "d28d374e8e8f3285a32a90419ccabd3bfb8b2e3d6d494f469bfc0fe78c317ba9"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_polyval_polyval._type_ignore": {"doc_hash": "1236f41ab6f04cc3c0fcdf05e119e782f609af4fbb7609e6ee95f2fa146465ee"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py__modified_bessel_0__modified_bessel_0.return.polyval__coef_y_torch": {"doc_hash": "b3ba14f54b6a7bd18af4ae492971b6073ae8786a43e293a7ee9d8672d807c00b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py__modified_bessel_1__modified_bessel_1.return._ans_if_x_0_0_else_ans": {"doc_hash": "7d8015613f7924e5a99c829f1d253f71f32f2e5a9e22b2b5f5d9c959aafbe72b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py__modified_bessel_i_": {"doc_hash": "efe26237e698f73f0823472daf5c9a9d499512187fbe050420af276662c60070"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_GaussianFilter_GaussianFilter.forward.return.separable_filtering_x_x_": {"doc_hash": "374e16cd3309d31cafc1391c3a3f6299518947f14cf86e9bf337fb90c671f432"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_grid_pull_grid_pull.return._GridPull_apply_input_gr": {"doc_hash": "57386c6316a2ed789e39dff6911736cab29ddc7be5c2dfc71ddbada36ee1802c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_grid_count_grid_count.return._GridCount_apply_grid_sh": {"doc_hash": "ce8b67d04ef7de08163285b243ffba11588977ad5f6917493bcb2948edb46a02"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_grid_grad_grid_grad.return._GridGrad_apply_input_gr": {"doc_hash": "af94b58d90e46d413c38452f0941f8b45db0e7da549080821440a77f9ed944c6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_AffineTransform_AffineTransform.__init__.self.reverse_indexing.reverse_indexing": {"doc_hash": "79afb969ebad2111d97300aec0b999f688fd8a929d7ab13d82e49f033f70daf1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet.__init___AHNet.__init__.if_pretrained_.self_copy_from_net2d_": {"doc_hash": "0fe0c5556ebce4ef6176975abbb22ae596e5aaeb5c0d3892400ba8b3f5bd4b2d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_re__DenseLayer.forward.return.torch_cat_x_new_feature": {"doc_hash": "5f4a7e44ff3679797c84a88631d24320d2347a7d27469aa089fe1583819b1baf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/dynunet.py_DynUNet.get_module_list_DynUNet.get_module_list.return.nn_ModuleList_layers_": {"doc_hash": "df43e668083b13e2d85eba72a63c244fe62f02121ab9012f36852713c4652cdd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/dynunet.py_DynUNet.get_deep_supervision_heads_": {"doc_hash": "559f2d8f6e581e6fcb367a4a98493f0bcf02494def977d81f1a43046f9a4cfc1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/novograd.py_from_typing_import_Callab_Novograd.__setstate__.for_group_in_self_param_g.group_setdefault_amsgrad": {"doc_hash": "88cc957525d8b003bd2ac0bd6e40846d99e259fd375d9455a95bed9244a51c4f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/novograd.py_Novograd.step_": {"doc_hash": "dad26db0230cd3487dc8b84fa5e7491860d4b4aa10b0eff5cc6a3ecc591328ce"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandCropByPosNegLabel_RandCropByPosNegLabel._": {"doc_hash": "cb4bfbf9381081c911575e4c7273c7e2a95ab410a818d4e9e06cabad07eb7767"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandCropByPosNegLabel.__init___RandCropByPosNegLabel.__init__.self.bg_indices.bg_indices": {"doc_hash": "2c97b37013cc5b850b79f179a2f955262d0f1ef8315cd1a5ae67b713907b8ab9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandCropByPosNegLabel.randomize_RandCropByPosNegLabel.randomize.self.centers.generate_pos_neg_label_cr": {"doc_hash": "67f88865471b7b5fa57073f2b79059d115cb1e4f307e0d983f4ad922c6b10879"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandCropByPosNegLabel.__call___RandCropByPosNegLabel.__call__.return.results": {"doc_hash": "91d7a85255662b08863d4f7996d8510b3dca4b2b72e590250459062638ac6740"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_ResizeWithPadOrCrop_ResizeWithPadOrCrop.__init__.self.cropper.CenterSpatialCrop_roi_siz": {"doc_hash": "91a8d306510bd90b2f2622d342b6fd6cfe61c1a5f0bdc0e441d95c5ffd16ce3b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandCropByPosNegLabeld_RandCropByPosNegLabeld._": {"doc_hash": "8bfc5cd955c6f34aa8c929833b767281d33d08cb5e6864753c1c4a5974a77e52"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandCropByPosNegLabeld.__init___RandCropByPosNegLabeld.__init__.self.centers.None": {"doc_hash": "ed51c4ad912ecf736fea6311914cb4ffcac34626f3d149506026694af013f065"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandCropByPosNegLabeld.randomize_RandCropByPosNegLabeld.randomize.self.centers.generate_pos_neg_label_cr": {"doc_hash": "282f4d860212e0a885a163fea6b927ab776290058d798099cb14d39ab9250f2b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_NormalizeIntensity._normalize_NormalizeIntensity._normalize.return.img": {"doc_hash": "6dbb596dd43e400c77a307131fcea879b83f9417f7c67791a57262dd6187b6c8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_GaussianSharpen_GaussianSharpen.__init__.self.approx.approx": {"doc_hash": "6f8cc7cda877068ca38635f6b71d777de76e79195a34dcf1bd64247f1e026351"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandGaussianSharpen.__call___RandGaussianSharpen.__call__.return.GaussianSharpen_sigma1_si": {"doc_hash": "b6912a947f3d787c750f4a9b28247cc464246dbac652898f9128cc313ce33fc7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandHistogramShift.randomize_RandHistogramShift.randomize.for_i_in_range_1_num_con.self_floating_control_poi": {"doc_hash": "69f4e6a8f18f3340088e1f844406ac7a699f6cb03586f20151413cde4c422242"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandHistogramShift.__call___": {"doc_hash": "280dfab53c75c8a5a92792d17b722afd59067fb89f3c1d3b7ebbdfa40c1ccd82"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandHistogramShiftd.randomize_RandHistogramShiftd.randomize.for_i_in_range_1_num_con.self_floating_control_poi": {"doc_hash": "99096fa892da7ff89409723424686581915a75262ffcca1cdab0f1a8acbfbbca"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandHistogramShiftd.__call___RandHistogramShiftd.__call__.return.d": {"doc_hash": "38aa80eefcbb0eaeb2582031c0faebabb9ebc4f38ed9b6794f44e6a82c6772cc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/array.py_LoadImage.__call___LoadImage.__call__.return.img_array_meta_data": {"doc_hash": "d1b674a64c85c61734bd48414ab0f479d783b6c0dd07331520ad249933142023"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/dictionary.py_LoadImaged.__call___LoadImaged.__call__.return.d": {"doc_hash": "6e7adeefc3e29f1031d31131ac7f4189432fa72169c3268a7a779a296aa9fbe1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_LabelToMask.__call___LabelToMask.__call__.return.np_any_data_axis_0_keep": {"doc_hash": "1eb528527e7157ad7923d4cb63317e1ffe1af7ec3736673808e53ca201db1c2d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_FgBgToIndices_FgBgToIndices.__init__.self.output_shape.output_shape": {"doc_hash": "5d461fe29c1b235e2186bbf30659b7a4f356b76ab8186fff4755af77e03dab48"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_FgBgToIndicesd_FgBgToIndicesd.__call__.return.d": {"doc_hash": "c7d87ad44149fedec77922ee947774b76925e72a26c635d42dd4b2c10765689c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_map_binary_to_indices_map_binary_to_indices.return.fg_indices_bg_indices": {"doc_hash": "b87c523648c34a6fe84c28745eacfb5b7fe6760a676eb62b5ca291862659f4e2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_generate_pos_neg_label_crop_centers_generate_pos_neg_label_crop_centers.return.centers": {"doc_hash": "cf213edcee0de658f239ea34335647cf3a09950d7155092a836991a189a5d496"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_set_determinism_set_determinism.if_seed_is_not_None_.else_restore_the_orig.torch.backends.cudnn.benchmark._flag_cudnn_benchmark": {"doc_hash": "1d8dc6788eed614d78582577a80dc1c35762f2ae2fd48e31c06bd111055c64de"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/module.py_load_submodules_load_submodules.return.submodules_err_mod": {"doc_hash": "7f3ab646f79f142da160448aeda23c0e1e02a2c8f1e51772eb279568c4f6d233"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/setup.py_glob_try_.finally_.print_f_BUILD_MONAI_CPP_": {"doc_hash": "8d453dadd4ac1b10ad4a8c6cb3236b729a0b83e9f7afa7e86a90d5f20ea7a072"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/setup.py_torch_parallel_backend_omp_flags.return._fopenmp_": {"doc_hash": "4dc79ca5b20e2736d059f38302bca60bc6c32a89b19ed43625f26ff04281d9da"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/setup.py_get_extensions_": {"doc_hash": "0b9869b904b5712babdbd52ce796eb9c69eebaf0588e68dffb1ebefab73fd6b7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/clang_format_utils.py_os_CLANG_FORMAT_PATH.os_path_join_CLANG_FORMAT": {"doc_hash": "fd373a1066c6f88b5463a9ceced662c2654aedbdfab85a3b05b6d018fc5fc9b3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/clang_format_utils.py_get_and_check_clang_format_": {"doc_hash": "aac29657bc644b410bf51bfa01c61ab993b485e9e0c59509556579686d144a62"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/min_tests.py_if___name_____main____": {"doc_hash": "9810ec74878878ef503ec5da45a3992c0101755c14de8ebef0bfd6d314759e81"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_check_hash.py_os_TEST_CASE_5._b4dc3c246b298eae37cefdf": {"doc_hash": "3b95f1b2d4fb385423ad7fa3295fb356e19c202f509bccc4f2a0283ad86ea9a3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_check_hash.py_TestCheckMD5_": {"doc_hash": "aab10397daeb9753726e918b32842d7368c0a091803a31cd7e4a2cef58118cb1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_sys_TestCompose.test_dict_compose.self_assertDictEqual_c_": {"doc_hash": "d3a6a52a8852709b0be5ff205ae82879722449fbd7e29f682c810569a9196390"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_TestCompose.test_random_compose_TestCompose.test_err_msg.with_self_assertRaisesReg.transforms_42_1_": {"doc_hash": "2485d3f2ed220eb85a0edd5cfe97470b7a0cb186cf71907a5e9bb5f3e41d1b4a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_TestCompose.test_data_loader_TestCompose.test_data_loader.set_determinism_None_": {"doc_hash": "d8e86b3f059287eb29542891ca5ada2e8cba052eb35d6e659874b2da94b1b2e8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_unittest_TestConvolution2D.test_transpose2.self_assertEqual_out_shap": {"doc_hash": "cf14830971fbd1eeeb8de1092a980c45797fc88decc4278e1cc7afee69b82e68"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_TestConvolution3D_TestConvolution3D.test_conv_only1.self_assertEqual_out_shap": {"doc_hash": "07c2326bd20d06c09a8315432b759242edcfda891d7a8ba1dbad203b238d3701"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_TestConvolution3D.test_stride1_TestConvolution3D.test_stride1.for_strides_in_2_2_2_.self_assertEqual_out_shap": {"doc_hash": "865b420626de39c1d79e1bb55cfffa11faf028eedc356aa3977ecfa5863ac41d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_TestConvolution3D.test_dilation1_TestConvolution3D.test_transpose2.self_assertEqual_out_shap": {"doc_hash": "ff2d7525765c03de93404d28fb18da114101822440549f0842bdc6eb1b3afa4f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crop_foreground.py_unittest_TEST_CASE_4._": {"doc_hash": "fc4ee2459b19412f11247ba910e9d872d5507ae5e5ede4166b7e39a23437abbe"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dynunet.py_unittest_TEST_CASE_DEEP_SUPERVISION._": {"doc_hash": "a7d3cd0b86c91de595910fabbf7f92042a31a5600effda322045204283a794e9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dynunet_block.py_unittest_None_1.for_kernel_size_in_1_3_.for_stride_in_1_2_.for_norm_name_in_batch_.for_in_size_in_15_16_.TEST_UP_BLOCK_append_test": {"doc_hash": "fde57f88b56ee6ba51ccb20e46b44083eaa046976fd514fb952fe4e6a3df9bd4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fg_bg_to_indices.py_unittest_TEST_CASE_5._": {"doc_hash": "9f39024bc7d4aa7caaa279fd76fdb31b773bce3a5d4b0a31a6c9a8bc1fd43ab8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fg_bg_to_indices.py_TestFgBgToIndices_": {"doc_hash": "e7b2be66ae66dd1a3481f2218fa675d9568214669aa8b51adc0f293a2a1b1333"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fg_bg_to_indicesd.py_unittest_TEST_CASE_5._": {"doc_hash": "1daa07aa36769f5300db71f7532916298408317500faece960f4f17aa8b0bc7a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fg_bg_to_indicesd.py_TestFgBgToIndicesd_": {"doc_hash": "444b49e3bde1b9ffd642334b753afa5d050066155594ff98606c064bccd0fcfd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian.py_unittest_TEST_CASES_NORM_F": {"doc_hash": "60884e489cf69562bb14ac0a506be7be7337c0cc7bf3cd1a26c612f5bd549586"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian.py_TestGaussian1d_TestGaussian1d.test_gaussian.None_2": {"doc_hash": "26fb036e2e206f9f5660bad88615e35c44ec9e64c5aa99c526202be4b0167acc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian.py_TestGaussian1d.test_scalespace_gaussian_TestGaussian1d.test_scalespace_gaussian.None_3": {"doc_hash": "1281f47f99436b0b12d3daadc97df6a7f6b703641019ff1444f29c2dd0d50789"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian.py_TestGaussian1d.test_norm_false_": {"doc_hash": 
"154d7967035c9b87e8deb0f8f314e865ac32bd6e3e71f2d594efe8c8393c6f27"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_GaussianFilterTestCase_GaussianFilterTestCase.test_1d.np_testing_assert_allclos": {"doc_hash": "d91ef843228182c8e4468c48c467040f1130fcd671118e4565400b615ef5bd03"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_smartcache.py_unittest_": {"doc_hash": "d3673a58c9d2d584f4ca6dd3fd3dacbc773ff387f919b502f3ad090d36868585"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hausdorff_distance.py_unittest_create_spherical_seg_3d.return.image": {"doc_hash": "81d79d881bc54b18b53c4f8d1e5c2867b300e6c6c54533cf9d076d740273cced"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_is_supported_format.py_unittest_": {"doc_hash": "9aa6a26e5f59af34b0028f679b3e46201b6560ad7a6a03d5238e0c4bb6dd3fda"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_decathlon_datalist.py_json_TestLoadDecathlonDatalist.test_seg_values.with_tempfile_TemporaryDi.None_1": {"doc_hash": "39fd8cb0eb5b089cdf71da4063ca63d0cbcadec1e8b61225d22f49767f21b1be"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_decathlon_datalist.py_TestLoadDecathlonDatalist.test_cls_values_TestLoadDecathlonDatalist.test_cls_values.with_tempfile_TemporaryDi.None_1": {"doc_hash": "ee9d54840a10548ab4c97b92073c06c5e59eae2e00a8c6d9284183978ae7663c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_decathlon_datalist.py_TestLoadDecathlonDatalist.test_seg_no_basedir_TestLoadDecathlonDatalist.test_seg_no_basedir.with_tempfile_TemporaryDi.None_1": {"doc_hash": "c1b28ba1510c11011c19f2d64231aa870b34acbd01b3871cf8c7d5e4f216e03b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage_TestLoadImage.test_nibabel_reader.with_tempfile_TemporaryDi.self_assertTupleEqual_res": {"doc_hash": "047291d0b361a1bf483fda0e9ee901a88f5da04aa2b1c3c3b340dfe66433a22f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_itk_reader_TestLoadImage.test_itk_reader.with_tempfile_TemporaryDi.self_assertTupleEqual_res": {"doc_hash": "0d9fc409fefb9ff53c4d7466c9486745eec0edd37f72c5616c88f7320620e913"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_itk_dicom_series_reader_TestLoadImage.test_itk_dicom_series_reader.self_assertTupleEqual_tup": {"doc_hash": "0156546c2d5bb18ec8e2b4a134b33a8fb10004494041c8cdd29542ca0506be34"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_register_TestLoadImage.test_register.with_tempfile_TemporaryDi.self_assertTupleEqual_res": {"doc_hash": "ec3f349b416136cadacf3d32b1287ab8d19f7f20ae2653a83a09daba64419db8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_kwargs_": {"doc_hash": "6e6c6f90b773cc2d40302ee4bdbe6f9a625417be29f9e2c6bad3c135276053a4"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_imaged.py_TestLoadImaged.test_register_": {"doc_hash": "43109bc01da8fcda3ac58247a851b586c03937c016100cd8f0d4a5824479aa6b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_map_binary_to_indices.py_unittest_": {"doc_hash": "36f02ef7feea2decb4620ff44ec413dbab5096f34fbdbb30267cf11848373ea2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensity.py_unittest_TEST_CASES._": {"doc_hash": "f2cfdec05d91df1fc868b0c8c7743f5dec70c456eabaf2e3235d7c71a7b883d7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensity.py_TestNormalizeIntensity.test_channel_wise_TestNormalizeIntensity.test_channel_wise.np_testing_assert_allclos": {"doc_hash": "7f29e537dddb6f899c12da2fab30e52a4f671422a9aaa3040342b182bc25036c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensity.py_TestNormalizeIntensity.test_value_errors_": {"doc_hash": "67b6f954862ce48c340aaa4a0b359c6f452edee18e646cacee33477f0819ad76"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_os_TestNumpyReader.test_npy.np_testing_assert_allclos": {"doc_hash": "54efd80cb87381edf9734e1872eb993c3f4270fc8228508cbdfb2f043de11a20"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_npz1_TestNumpyReader.test_npz1.np_testing_assert_allclos": {"doc_hash": "ac19d4e641176d0cbc1e40ce28a74c7d818d5988ea38634bb3604051b54613fb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_npz2_TestNumpyReader.test_npz2.np_testing_assert_allclos": {"doc_hash": "c38cc1c21964f91985753d3522e8bf8facb3fdff77bea02dd19b5af85e2e659c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_npz3_TestNumpyReader.test_npz3.np_testing_assert_allclos": {"doc_hash": "aae09936f4450c67dec4cb99cf98a3130817f46c9130a150e2bcedca8a29d2c0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_npy_pickle_TestNumpyReader.test_npy_pickle.np_testing_assert_allclos": {"doc_hash": "fe9d116790703712d280849dde0035cdf9fc60aa1832b3678e60b2d7babac039"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_kwargs_": {"doc_hash": "47e8150080368064bc151f657f88bb8e1f1e7bb7ca46b2c18ff58507d679b565"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optim_novograd.py_unittest_build_test_cases.return.test_cases": {"doc_hash": "f6280697235f584768840e772782ec548a2ff18bec76ff62f797377611d94f47"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optim_novograd.py_TEST_CASES_ALL_if_torch_cuda_device_coun.TEST_CASES_ALL_build_t": {"doc_hash": "3244468cf616e86cd150beaecf7360bda2291f2cd383a3db8a54c9cc7134f245"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optim_novograd.py_TestNovograd_TestNovograd.test_step.self_assertLess_fn_item": {"doc_hash": "4eceeffe3916bbbd203a2b1f0b02b1fde8ad9f858f30006ac4374da0ffbb5f22"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optim_novograd.py_TestNovograd.test_ill_arg_": {"doc_hash": "576f810224f2f4253b9379c8e2da401366c579e79e57c63ce4c9611a0975ecd6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_parallel_execution.py_unittest_TestParallelExecution.test_single_gpu.trainer_run_fake_data_str": {"doc_hash": "4d6d616e54250dd4bd14d8fcf37494edba3a09ed13c1da190420ba87e8edc999"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_parallel_execution.py_TestParallelExecution.test_multi_gpu_": {"doc_hash": "198483bdc3b3ad6d94a3840ab39fab49d790c143b974f0490c14422b56c21cca"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_patch_dataset.py_sys_TestPatchDataset.test_shape.self_assertEqual_output_": {"doc_hash": "c1cef32dab782c4a77bb27c3d596989fbbfa113b74b29dc188bfe630ae66d19f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_patch_dataset.py_TestPatchDataset.test_loading_array_": {"doc_hash": "3d2f4ce15cf4dc4e878575bfd75d9a2711a082bc09711ef9f16b8aa3f11137d8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pil_reader.py_os_TEST_CASE_7._128_128_3_test_im": {"doc_hash": "b521820d36e3032646abf41ce0cf46a8e052d7279a5bd4481d8e97ebc478355b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pil_reader.py_TestPNGReader_TestPNGReader.test_shape_value.if_result_0_shape_tes.else_.np_testing_assert_allclos": {"doc_hash": "469b633744b7d84b9dcbcaa9b480c09086eee40993e111f0091d04878921400e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pil_reader.py_TestPNGReader.test_converter_": {"doc_hash": "ae4cebc8e64ca41007534a58afb82b36e042099495d71aff0bd1a779ad78bdc6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_polyval.py_unittest_TEST_CASES._": {"doc_hash": "c3dc08a059582677b83251189066938271bcd1262746d3471fd005f1f3d5a373"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_polyval.py_TestPolyval_": {"doc_hash": "21c0d1f7d4c1f225f1180b86f4604b0e4b1e53a4f0769bdbe3763f72788e5464"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_smoothd.py_unittest_TEST_CASE_2._": {"doc_hash": "033eeda86078db12f85bdce8e9fadf82694f9c223f85af0437409c783d37e6d7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_smoothd.py_TEST_CASE_3_": {"doc_hash": "c8d8e6a1fe70d8aa8b65e6b6d758fb8566fb4202958e6e63c61050aa6d05ee2b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_histogram_shift.py_unittest_": {"doc_hash": "6ecc866894e6c08d0a99c0018689827c487899712f181c9edc4d560b78ec930d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_histogram_shiftd.py_unittest_TEST_CASES._": 
{"doc_hash": "bc1f54bf3b2377cf5ea685f0c0a02b9714cecf030167400f4d3050564c062ee5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_histogram_shiftd.py_TestRandHistogramShiftD_": {"doc_hash": "6c0397ce13526c2c658ffdd4e1946e55df4caff1a3f67712c8b499ba9d6ca601"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samples.py_unittest_TEST_CASE_1._": {"doc_hash": "ee3bb8b85db0e443a9e7804832b4f3d46dc54dbc5336f7c87922e00cccde1257"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samples.py_TEST_CASE_2_TEST_CASE_2._": {"doc_hash": "4fa2ffcb3e0adc4f68b65fc94f9f2e02517e040953dc1b9d0301cdba9486c856"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samples.py_TestRandSpatialCropSamples_": {"doc_hash": "ee2c17492692f1133d50b98bf3b11c49759e93f10d784d56deb7cc16ab808532"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samplesd.py_unittest_TEST_CASE_1._": {"doc_hash": "42b7550543e226e5561b4587139e3c91c4898db746d58595456b047cd4828250"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samplesd.py_TEST_CASE_2_TEST_CASE_2._": {"doc_hash": "de024b043735aa5297ecb5c03d1b38fcbaa10aba3593eacce016163452b7cfcb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samplesd.py_TestRandSpatialCropSamplesd_": {"doc_hash": "df5910eb254e7652dd95ce45cb2ba244c6797160a76e91b99f10dd2aed106290"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resize_with_pad_or_cropd.py_unittest_": {"doc_hash": "9a96ba190dcee67ad100a4329a58ad23c04242e55c336a81c4afc4739e3e5e28"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_senet.py_TestPretrainedSENET_": {"doc_hash": "c4852b8bfa390d0f109af3b496d59b20e51c873f6589917393824bf48957faf3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_unittest_TEST_CASES._": {"doc_hash": "272ef66d15af92b4c108e7db6b0a893eb1c263175aa6818b59ff4ae4c741c1b0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference_TestSlidingWindowInference.test_sliding_window_default.None_3": {"doc_hash": "210539bc196cc8c753488326d61b63d48e407e223d0329fffbcd3f255e2908be"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference.test_default_device_TestSlidingWindowInference.test_default_device.np_testing_assert_allclos": {"doc_hash": "50d262b29cbc943cd7c75390a52d14dd115b3843b5c18ffc589dd180a4019a96"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference.test_sigma_TestSlidingWindowInference.test_sigma.result_6.sliding_window_inference_": {"doc_hash": "6f9df54b079a44a7bdf231090fcfad4587b75402213c68e113ec213b2c436305"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference.test_sigma.expected_7_TestSlidingWindowInference.test_sigma.None_3": {"doc_hash": "420c113b08dec69b22172a989ec24e7d68eaca2f523ac2f912003e10280b842d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_surface_distance.py_unittest_create_spherical_seg_3d.return.image": {"doc_hash": "7144c1d7319a8b9c4c3d052c9d7d8378e3897d9492e45f5dfc9c674aca7b7e26"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_upsample_block.py_unittest_TEST_CASES_EQ._": {"doc_hash": "ca2da0657731ce964ca0c4c932a20c64a4b8e5c7220d450ae61c13d0337d6e12"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_upsample_block.py_for_s_in_range_1_5__": {"doc_hash": "c9d26dbb70a8589e53f9cbf5a96f1469f55a281bcde0f949f747b8fca8dc73e9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom.py_TestZoom.test_keep_size_TestZoom.test_invalid_inputs.with_self_assertRaises_ra.zoom_fn_self_imt_0_": {"doc_hash": "ef104c2589bc5c57ce981a44854ef38182e7ecdeaa89edd7bc02b552fa1921ba"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom.py_TestZoom.test_padding_mode_": {"doc_hash": "6dc9b2a935fba654edb73a6cabd772d7075e2c7eb738c1a48277f47518ba17d3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_pep440_pre_render_pep440_pre.return.rendered": {"doc_hash": "63a806c3f81301b906859105a3d68df9a7dc14e614c39edc6264b8b0aa9448a3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_pep440_post_render_pep440_post.return.rendered": {"doc_hash": "935b462cffa34a1800c0d952ae8b061e8a5f4a304dc06ae969e2dfe80d2fe01c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/__init__.py_CrossValidation_": {"doc_hash": "18ea12bbcc40eef31c21cfa96cc60d6a149601dd38fd2a717f01eea9dfeef470"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_os___all__._MedNISTDataset_Decat": {"doc_hash": "2560edb183cf998166aa1eacd145106df664fca0e67114de74c0a4cebc51af59"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_MedNISTDataset_MedNISTDataset.randomize.self_R_shuffle_data_": {"doc_hash": "dcb5a6f80a477b8349d3901573251b3b89016430c6af7e395837dc359da95440"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_MedNISTDataset.get_num_classes_MedNISTDataset._generate_data_list.return.data": {"doc_hash": "c50f724073ce7ee0111080f5e40535790be5b1ce87bd15270c36c18085c749cb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_DecathlonDataset.__init___DecathlonDataset._split_datalist.return._datalist_i_for_i_in_sel": {"doc_hash": "0bf976eb7159c30e25e1b19acbf3e2c24e516a9821815938d9c9f494c6ffaa2b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_CrossValidation_CrossValidation.__init__.self.dataset_params.dataset_params": {"doc_hash": 
"1465e295c8cb58d19c2eb85cc3a909e8e51136b8676a383278bacc1d9c8b82d2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_CrossValidation.get_dataset_": {"doc_hash": "dbf9328b81fc4b3723e1be3004437f99f5453794dba249a60b1b696c1e1e30f1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/__init__.py__": {"doc_hash": "a7945525415783e16b1f915e01a13899df488942373ffccbf12a9f8884db378c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/dataset.py_logging_create_dataset.new_datalist._": {"doc_hash": "c54385e156ed86f265f797c59bee159994399bca5facb27701208598db7a4945"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/dataset.py_create_dataset.for_idx_in_range_len_data_create_dataset.return.new_datalist": {"doc_hash": "f230be101780ef0f7fd81de6c6e4b032c837902e100f6089cc2c48b1d4d646e5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/dataset.py__default_transforms__default_transforms.return.Compose_": {"doc_hash": "f7623ae781ea04322a74c3b762d850c51319e810986d2c710b9279e11ff698fb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/dataset.py__save_data_2d__save_data_2d.return.data_list": {"doc_hash": "acfa69652e831a0bd5c8dea38d34fbc3a0a32713c83c5cb6fe744cc9d65a5dc9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/dataset.py__save_data_3d_": {"doc_hash": "03465f880fa38a265808985ba37b91556f680564d8e2d5b0dd7d6a5b1a34603a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/interaction.py_from_typing_import_Callab_Interaction.attach.if_not_engine_has_event_h.engine_add_event_handler_": {"doc_hash": "22166a918dccfce397c37e967f3ea231a9007ab000da4cf77407cfb743d516f5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/interaction.py_Interaction.__call___": {"doc_hash": "f07712a51fd4ff131c86de3b99b8fb4c6b0c30adb1464fe982f5113ea3f58e56"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_from_typing_import_Callab_distance_transform_cdt__": {"doc_hash": "ba746a7bf8111d2912e40cd875b6720a14ae414abdfa222ccbe3522829391ea0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py__Transforms_to_support_T_FindAllValidSlicesd.__call__.return.d": {"doc_hash": "afe5091062d972af0702edfd59d04af553ce4d086801129eee76be1005c4ed31"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_AddInitialSeedPointd_AddInitialSeedPointd.randomize.self.sid.sid": {"doc_hash": "80884ed4145b2c145a347bb428b610a2080b40c097d48e8752d1d1cf61e28b9f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_AddInitialSeedPointd._apply_AddInitialSeedPointd.__call__.return.d": {"doc_hash": "bbd87bef4fff764a98419f90423da96aa7f8bf71b4c76dfe2bb5b32e1c09b2f9"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_AddGuidanceSignald_AddGuidanceSignald.__init__.self.batched.batched": {"doc_hash": "4e2ff26d9ddb36fd9ecf7a02f4a298485d790340f8dcf0e8038ab7190afcf9ab"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_AddGuidanceSignald._get_signal_AddGuidanceSignald._get_signal.return.signal": {"doc_hash": "d2687ca84361497e2dfbe48bd3dfbd3bac73ffa6aabd6127cad2da1d42bfca7c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_AddGuidanceSignald._apply_AddGuidanceSignald.__call__.return.d": {"doc_hash": "2333df47a31f9d81a4388adcc514a950b8f0ad3720e4a92b982d90c7c7a20ef4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_FindDiscrepancyRegionsd_FindDiscrepancyRegionsd.__call__.return.d": {"doc_hash": "aef5fbb04e7752f6ed096f4069a50c6eac14f3eea9cd2eb8f7898623ae9df66c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_AddRandomGuidanced_AddRandomGuidanced.randomize.if_not_self_batched_.else_.for_p_in_probability_.self__will_interact_appen": {"doc_hash": "9dcd12fd596bd5b19cf807e541548ed6e16ec0da7d7820a67883fe6f69500e1f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_AddRandomGuidanced.find_guidance_AddRandomGuidanced.find_guidance.return.None": {"doc_hash": "e8ecab5ed129b619373fa9ff5d12a714f6ce92a5a39204e8ba852edeae5c8874"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_AddRandomGuidanced.add_guidance_AddRandomGuidanced.add_guidance.return.None_None": {"doc_hash": "df091469f86bede1f73081f353bc74cdfefb14bd03d0f40f6117956dbb747bb0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_AddRandomGuidanced._apply_AddRandomGuidanced.__call__.return.d": {"doc_hash": "c47bc5969dda90e13b572d7244d70fca9e3507b757b8748e0bd9c456e522e4b0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_SpatialCropForegroundd_SpatialCropForegroundd._": {"doc_hash": "4b67694cb5d6309f0ccee0cb9dbbac70f34013f227c5212a720eb553bc6b0801"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_SpatialCropForegroundd.__init___SpatialCropForegroundd.__init__.self.cropped_shape_key.cropped_shape_key": {"doc_hash": "d0ee593083419f64ea48db9e9a1a5b25c8622a60a7d4128932c7f15dd583a976"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_SpatialCropForegroundd.__call___SpatialCropForegroundd.__call__.return.d": {"doc_hash": "a15def20744a6736d957d374b2b2b59f28c8b81b4cfebee39552641e8aa432cd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py__Transforms_to_support_I_AddGuidanceFromPointsd.__init__.self.meta_key_postfix.meta_key_postfix": {"doc_hash": "ab4d90953ebf37b45502439c9e5be18998c30cee0985d4e705bb141d65da2bbf"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_AddGuidanceFromPointsd._apply_AddGuidanceFromPointsd._apply.return.guidance": {"doc_hash": "ad469f67f1a1c9eaa3ffe6c30aa1d46a7a3280b7f35fcf0be3d6e4c9be8c9e90"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_AddGuidanceFromPointsd.__call___AddGuidanceFromPointsd.__call__.return.d": {"doc_hash": "880ffd8597ccab7bc2bf0220092b9babfb03f6fee82edb08546106a091790b25"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_SpatialCropGuidanced_SpatialCropGuidanced.__init__.self.cropped_shape_key.cropped_shape_key": {"doc_hash": "508d9ddbe5b0c777aca0f6745de1de996e5af1741f5c90ce2913cd68e12d44b8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_SpatialCropGuidanced.bounding_box_SpatialCropGuidanced.bounding_box.return.box_start_box_end": {"doc_hash": "2d16b9ddb035dd9f352d070301740f1627bfea779f9f5240a67ab55cdb8ff5ad"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_SpatialCropGuidanced.__call___SpatialCropGuidanced.__call__.return.d": {"doc_hash": "4905d5bef901db560e5d32263355be87bf75b753c64a8dfe843a827cb9678e2e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_ResizeGuidanced_ResizeGuidanced.__init__.self.cropped_shape_key.cropped_shape_key": {"doc_hash": "111afd219dce4b8998d4f9c60ab4c20e56abaec5203c4fff012242b19108956a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_ResizeGuidanced.__call___ResizeGuidanced.__call__.return.d": {"doc_hash": "d1f81a062844053dcb9818ed406e56960c6fee35a84beb205669298948e85c3c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_RestoreLabeld_RestoreLabeld._": {"doc_hash": "f71d23b651d5c848ed9517fb712799ce5adb6499ea32500baecffef582ce8f9e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_RestoreLabeld.__init___RestoreLabeld.__init__.self.cropped_shape_key.cropped_shape_key": {"doc_hash": "bc2ebe64e5b2f05001ac85f3c6caefa87692dfe9211ca8756b4ca352721a57e3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_RestoreLabeld.__call___RestoreLabeld.__call__.return.d": {"doc_hash": "578a3b0bc881dd19fc5388c17b77d1d6ac5c14b3b8cfa2f2f22f4e43c56a4c30"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/transforms.py_Fetch2DSliced_": {"doc_hash": "b350318f33d72cc038dde56b33e40315b2e3d5f4fa27c67b132060320e144205"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/__init__.py_MaskedInferenceWSIDataset_": {"doc_hash": "8a0ab6ba7d54a3eedd602c8801929c2bfc080d0889f4e40e1ff5cdcd94999031"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/datasets.py_os_PatchWSIDataset._fetch_wsi_objects.for_image_path_in_self_im.self_wsi_object_dict_imag": {"doc_hash": 
"a6d6f2426b207431862f4753e7ec13f8617381a11eb7b43f16ccdec42eb015c8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/datasets.py_PatchWSIDataset.__getitem___PatchWSIDataset.__getitem__.return.patches": {"doc_hash": "3926e2e8d7f0f5f279af283aa906d2dd6b06120654fd750a7f76c808cb56d59c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/datasets.py_SmartCachePatchWSIDataset_SmartCachePatchWSIDataset.__init__.super___init___": {"doc_hash": "5d12c5daf1628e37cbe9766b85417f9100a6e29220f6fbf39f8e33b63abd2691"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/datasets.py_MaskedInferenceWSIDataset_MaskedInferenceWSIDataset._prepare_data.return.prepared_data": {"doc_hash": "71ba71c0e45eee5cfea644ad9c23a4cdf73a2b29d548e07c7476cbd9dc1c34c1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/datasets.py_MaskedInferenceWSIDataset._prepare_a_sample_MaskedInferenceWSIDataset._prepare_a_sample.return._": {"doc_hash": "1be28f93d545ede48eb32947185d7fb5dced053c1e64993e23d9c2d94b1452b5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/datasets.py_MaskedInferenceWSIDataset._calculate_mask_level_MaskedInferenceWSIDataset._calculate_mask_level.return.int_level_ratios_0_": {"doc_hash": "1714cfae272bb5fe1a00f42710bf5d4fc2c4170dcc4e6dd9fd0537074a1c5c93"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/datasets.py_MaskedInferenceWSIDataset._load_a_patch_": {"doc_hash": "9b518ce517296001b05fcb05506b57f573378c079d8a50dff781dff3fed0f5a3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/handlers.py_logging_ProbMapProducer.__init__.self.num_images.0": {"doc_hash": "d7f529c448c111e2d2770b5515cd63dccffb37a2d0a32c7d14b1ca7c79c92f97"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/handlers.py_ProbMapProducer.attach_ProbMapProducer.attach.None_3.engine_add_event_handler_": {"doc_hash": "69761e86fb4acd014237e894b294052e7b62fd43d93ba2694e2f14f164e7cf34"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/handlers.py_ProbMapProducer.__call___ProbMapProducer.__call__.for_i_name_in_enumerate_.if_self_counter_name_.self_save_prob_map_name_": {"doc_hash": "e0aee043e95acb62123952471af1134614581448eea5d17f40c1ad14bd7c2ab2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/handlers.py_ProbMapProducer.save_prob_map_": {"doc_hash": "76db6a3cc391560d822b134e9278ff6b3bfeb6b92bf4976b383cb32f6054e34d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/metrics.py_from_typing_import_TYPE_C_if_not_has_tqdm_.tqdm.return.x": {"doc_hash": "f657558440b55cfa9d94dc077e4ac98ab057d3e985a6c1633c82b0af018ae256"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/metrics.py_LesionFROC_LesionFROC.__init__.self.nms.PathologyProbNMS_": {"doc_hash": "6b1d3a23885a6faba149c4950265957b00b29365da42c268008684cff73028c9"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/metrics.py_LesionFROC.prepare_inference_result_LesionFROC.prepare_inference_result.return.np_array_probs_np_array": {"doc_hash": "908e3af627284985869580d63f96dd571a8dca775fecfb129c5b51b428c51694"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/metrics.py_LesionFROC.prepare_ground_truth_LesionFROC.prepare_ground_truth.return.tumor_mask_itc_labels": {"doc_hash": "71a59499c52b7a3270b6320ee5ddd489bd652621d9e7378d66dc7df49cd5ffe7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/metrics.py_LesionFROC.compute_fp_tp_LesionFROC.compute_fp_tp.return._": {"doc_hash": "5f2945338dab8f7d9704f1f4f47e756415322c5bf8ad09db4404848a79205824"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/metrics.py_LesionFROC.evaluate_": {"doc_hash": "863e66a7fce182652a645ed139583668d8af08e3b4323ac54402a19dca64f766"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/utils.py_from_typing_import_List__compute_multi_instance_mask.return.multi_instance_mask": {"doc_hash": "4399c7da0a7ac9146d60be409dea6e5da6602c7574a8c480be9bf249c22b4697"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/utils.py_compute_isolated_tumor_cells_compute_isolated_tumor_cells.return.itc_list": {"doc_hash": "40ff093a31ecbd569afc5c4923c1320cb065168ed96fbb23345f510b273ccac2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/pathology/utils.py_PathologyProbNMS_": {"doc_hash": "c96f8f96ad04542e3d662bb4b25d7d099012e8a505e0df6d4a84869106d338b6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/utils.py_hashlib___all__._": {"doc_hash": "0f038e84738a2762310aecdbcf4c2a894a8aae15500ac475a947b834827f27ff"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/utils.py_download_url_download_url.if_not_check_hash_filepat.raise_RuntimeError_": {"doc_hash": "8a16cded6745013f892a9bf1c048cedd136cebab709bfebb79983d6099b2c8c9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/utils.py_download_and_extract_": {"doc_hash": "00e1977ce31042ae14e7360d65b9284b7587ffc72443537eaaf8cbf00cefb79f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/__init__.py_USE_COMPILED_": {"doc_hash": "358f0aa8bd13557cf7c7b94f948a30aad01578babe3fac3911dbd395876cf5f0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/deviceconfig.py_print_config__dict_append.try_.except_BaseException_.in_dict_key_UNKNOWN_f": {"doc_hash": "89c6d06a83613d16ba1d06498c655140288d2aba70d13e2ee7c30a0e529d3478"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/deviceconfig.py_get_system_info_get_system_info.return.output": {"doc_hash": "b178d8a6705402cd12a6030ed07caf67314add94e997e05821ac03ebb6058620"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/deviceconfig.py_print_system_info_print_system_info.if_not_has_psutil_.else_.for_k_v_in_get_system_in.print_f_k_v_file_f": {"doc_hash": "3f6eeb652631a87e6dc58bbf3d47b4a00d11c42b384307e8ce1f576434a8613a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/deviceconfig.py_get_gpu_info_get_gpu_info.return.output": {"doc_hash": "1ed7da5373fdff1ffda07e826f5d7709f803cb73a4363e05eaec2e19af5766e1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/deviceconfig.py_print_gpu_info_": {"doc_hash": "b9ca013fcea92c76b04db88f85a2fec13f054ad4093a5857582183d781ad8314"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataloader.py_torch_": {"doc_hash": "6bca5fe1bb50317ea1e18df3906dfb7d0aa0190f3e8df00f58e7fb2147b7d250"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_collections.abc_lmdb___optional_import": {"doc_hash": "73f0759d0239a1dd84b449570f8aaed55a305a9ac78d3ebba5d585a3d9000310"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_Dataset_Dataset._transform.return.apply_transform_self_tran": {"doc_hash": "801ae8ee339386e1335ce8ef1cdd5e76224b8dde38810c7b270f8f26f6b4f326"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_Dataset.__getitem___Dataset.__getitem__.return.self__transform_index_": {"doc_hash": "521b7153ee416cfd3c4ee2e32606e718cad0458829c1557e83f01f237cd986bd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_PersistentDataset_PersistentDataset._": {"doc_hash": "e824840345bd0b97f2b8022dec9ec1f5f5a7560043bf067901e7e6891d501b9a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_PersistentDataset.__init___PersistentDataset.__init__.if_self_cache_dir_is_not_.if_not_self_cache_dir_is_.raise_ValueError_cache_d": {"doc_hash": "24364637e4ac11c38a381e3ede8208fc87574be1bfe1796e159badae231ba602"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_PersistentDataset._pre_transform_PersistentDataset._pre_transform.return.item_transformed": {"doc_hash": "fd6f8b4cf4d28f0e4dbb6ae0a052bc4cd3adf9c0c3a51b38a6c8533e6ead91d5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_PersistentDataset._post_transform_PersistentDataset._post_transform.return.item_transformed": {"doc_hash": "1380c391f24daeef3f09e2fbc69e11914eb408de2c5d23a72e01d6d13a11ad32"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_PersistentDataset._cachecheck_PersistentDataset._transform.return.self__post_transform_pre_": {"doc_hash": "21231889c4717b79130980e7ba694653c9d0da0e02e087f25b26d9cc757c0efb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_CacheNTransDataset_CacheNTransDataset.__init__.self.cache_n_trans.cache_n_trans": {"doc_hash": "e947caba08a1c8acaca6cd3993afb785a0b4e24747b1339153f596d349813092"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_CacheNTransDataset._pre_transform_CacheNTransDataset._pre_transform.return.item_transformed": {"doc_hash": "ac6e4185729e141db925db260d1510a90efe30a7c2f94d1503905df5231827c1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_CacheNTransDataset._post_transform_CacheNTransDataset._post_transform.return.item_transformed": {"doc_hash": "64a36d4d42064c51c8c8361ca8bfaf0b62715ed71833d7fbee22a8af71a233bd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_LMDBDataset_LMDBDataset.__init__.print_f_Accessing_lmdb_fi": {"doc_hash": "6334f02b7a60814e3647011432e89a445083afdbe876e7d020eeea7b582480e9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_LMDBDataset._fill_cache_start_reader_LMDBDataset._fill_cache_start_reader.return.lmdb_open_path_f_self_db": {"doc_hash": "52cf82bbb0cb813baa82d24f3dbbfa2b312fd6a19271e6fe11bc2d209ecfa6b4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_LMDBDataset._cachecheck_LMDBDataset.info.return.out": {"doc_hash": "59767880273d2cd04aa7d82b6837bcaedd245a394cf49a4adeeb41b16f593f9f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_CacheDataset_CacheDataset.__init__.self._cache.self__fill_cache_": {"doc_hash": "a190fe8efad294af3315d10edc9a41663297b663fc0f946170a27babd8ed7895"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_CacheDataset._fill_cache_CacheDataset._fill_cache.with_ThreadPool_self_num_.return.list_p_imap_self__load_ca": {"doc_hash": "37e209fe44f1a9aca5d5568e7c3446c379972e9a660cb25a6981c62c31c9bc34"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_CacheDataset._load_cache_item_CacheDataset._load_cache_item.return.item": {"doc_hash": "2a0763a1217ae9c97d3c7f2e335d5a7ad2bbf39b04e1dff650bbdd2f23926714"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_CacheDataset._transform_CacheDataset._transform.return.data": {"doc_hash": "cfa6ce08fc787c1a11a22b53fbda5cc6558f13be27d778320084da093a852f0f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_SmartCacheDataset._try_update_cache_SmartCacheDataset._try_update_cache.with_self__update_lock_.return.True": {"doc_hash": "8d11668bcbdd10076b5602b8b81bbec8777b391945c0f6562850751f5abf1db6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_ZipDataset_ZipDataset._transform.return.tuple_data_": {"doc_hash": "5976af1869916c2a556dceb25f63902d429cf2ed909c11901f6cec5d49502c66"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_ArrayDataset_ArrayDataset.__getitem__.return.self_dataset_index_": {"doc_hash": "0361b2bc8157804063550cc08c4ac16b7bd85b9c191ccd8297461bd356a9e7cc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_NPZDictItemDataset_": {"doc_hash": "c13de6355090a6885dfa3b547f7aeef764afd8a105b18d2c7475e2954f389b98"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/decathlon_datalist.py_json__compute_path_2.return.element": {"doc_hash": "c5f2ca00d11e8dd2fa66389dea7d635f9ee4e92188d5cdedfa4da282271b8e61"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/grid_dataset.py_from_typing_import_Callab_PatchIter.__call__.yield_from_iter_patch_": {"doc_hash": "6d7944d3f9abaacfe6e0b701a4a7ad77b38d5b890a72dad7c66f87bbda143667"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/grid_dataset.py_GridPatchDataset_GridPatchDataset.__init__.self.with_coordinates.with_coordinates": {"doc_hash": "49b77aab8d1ea513592bb104b31c24be44dba77452aeb644f22802624e4dc89d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/grid_dataset.py_GridPatchDataset.__iter___GridPatchDataset.__iter__.for_index_in_range_iter_s.if_not_self_with_coordina.else_.for_patch_slices___in_.yield_out_patch_slices": {"doc_hash": "f7245402624a58dc8a341db572d082ba0baa33de5337ff4e18c1501b90f509af"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/grid_dataset.py_PatchDataset._transform_": {"doc_hash": "a2915ce3079a38248199e57931644666b62e62d7784cceac55596b4dfe9adf31"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_dataset.py_from_typing_import_Any_C_ImageDataset.randomize.self._seed.self_R_randint_MAX_SEED_": {"doc_hash": "93c030898b7671d028f7040667b48b364933fc7cfa211bbbdf9cc6b30a6bd041"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_dataset.py_ImageDataset.__getitem___": {"doc_hash": "4051e40ba20285602d4ac662b3fdf9240447b5d689bcf66ba20858702a018f05"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_os___all__._ImageReader_ITKReade": {"doc_hash": "5a49defc8b47a7d8c5822901a94a0788cec23a5b90e8e4a567059d9a34e7f202"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py__copy_compatible_dict__stack_images.return.img_array": {"doc_hash": "af6f92ade7ff9f65ad3c628772d5ec5a8e76f103c4f15218b0eebac2223017f5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ITKReader_ITKReader.verify_suffix.return.has_itk": {"doc_hash": "a3bfc7d01252c9d698fa9d94003582738b16634d10d2411a8f3032406d71320e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ITKReader.get_data_ITKReader.get_data.return._stack_images_img_array_": {"doc_hash": "5c570ebf51473b028db4418c17b411fcbbcaa3a401cab42f90530e7927de582c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ITKReader._get_affine_ITKReader._get_spatial_shape.return.np_asarray_shape_": {"doc_hash": "02d68eaa7ea8307b77eff6d7525b63267c9b923076a56f2d345e05fe4a4d81e4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ITKReader._get_array_data_ITKReader._get_array_data.return.np_moveaxis_arr_source_": {"doc_hash": "5add6978185de36d790a031cdd82265c4f76ee2fe9c0cd7547568e82c9ff76da"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NibabelReader_NibabelReader.verify_suffix.return.has_nib_and_is_supported_": {"doc_hash": "914ab9123489e0692efd17aa540ee0e1ce87c72a02edf53ee7891be83280a3f8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NibabelReader.get_data_NibabelReader.get_data.return._stack_images_img_array_": {"doc_hash": "26999b7ed80cbe1d2ed36a574b28852e5b83e7c8fed9d5f6355dd8206f601a84"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NibabelReader._get_meta_dict_NibabelReader._get_array_data.return._array": {"doc_hash": "66fe653e49c964de4e89b6f91f0fd12f2441a5c1e6e1f5dc1f0ee919be556044"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NumpyReader.get_data_NumpyReader.get_data.return._stack_images_img_array_": {"doc_hash": "b221a256d1642032b853bd0e3e0b30106cce3ff9ed83647b6e11eff7c2f252f8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_PILReader_PILReader.verify_suffix.return.has_pil_and_is_supported_": {"doc_hash": "278b93c1fdd4727d997cc776abdcb2dfe976a91f965c42eb5712d7bb1d436eec"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_PILReader.get_data_PILReader.get_data.return._stack_images_img_array_": {"doc_hash": "89de987147925c6eb062438aa507e74601178c48e22220458e64b2b676b11b86"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_PILReader._get_meta_dict_PILReader._get_spatial_shape.return.np_asarray_img_width_im": {"doc_hash": "c3c9f76e359bc15aeeb87b897d97b3b1010d7cbdfca4404d874e09d52f1778c1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_WSIReader_WSIReader.verify_suffix.return.is_supported_format_filen": {"doc_hash": "8f056643e53ae851b4a8e78bc7a193bacaffa658510679ea0ecb5b0df200fff3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_WSIReader.read_WSIReader.read.return.img__if_len_filenames_": {"doc_hash": "c615860e5b384b7d6ed4d3135d4d5797170d56d95ead4d2bbfb20a7570e2198e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_WSIReader.get_data_WSIReader.get_data.return.patches_metadata": {"doc_hash": "4c89b286a360e899e28355ee24c5b2e60a5dfe9cae8dc6193f9a8929ff77f575"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_WSIReader._extract_region_WSIReader._extract_region.return.region": {"doc_hash": "a1a84db1d807a7d21c6091a3a0f52c421b62162097c2da373146958454dd6b1b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_WSIReader.convert_to_rgb_array_WSIReader.convert_to_rgb_array.return.raw_region": {"doc_hash": "60a2618ce4d6c29dd6969c6f5ac2a432d9b08ba6ac8a4161216e790f46a3bb70"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_WSIReader._extract_patches_": {"doc_hash": "8ae1221a0f922ff8d11066264762425833e489803b68b1e18bcb639f0b6863e4"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/inverse_batch_transform.py_warnings___all__._BatchInverseTransform_": {"doc_hash": "a41f0a2efa65491f344de778a178087c6a9ad5d2f6536a4f67692ca28ac20662"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/inverse_batch_transform.py__BatchInverseDataset__BatchInverseDataset._transform.return.self_invertible_transform": {"doc_hash": "c3b18ab9192dd376b717d84935c630b9d212ce8da86187eb9f908abf16ce722d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/inverse_batch_transform.py_BatchInverseTransform_BatchInverseTransform.__init__.self.pad_collation_used.loader_collate_fn_pad_": {"doc_hash": "e8f0b93e76a1d6343be6940205d705afd9cc3bd2cb74f9aaaa8b207bf75e2545"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/inverse_batch_transform.py_BatchInverseTransform.__call___": {"doc_hash": "69c56fe34479a5fb2912624c008eb1faf6043b8657f74b1aa74c155b8cfbb378"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/iterable_dataset.py_from_typing_import_Callab_": {"doc_hash": "42e5306ac38a98c3ceca6319fe0ad0d38eed619174c5d0a7aec68d2b9c7a56ab"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_saver.py_from_typing_import_Dict__NiftiSaver.__init__.self.data_root_dir.data_root_dir": {"doc_hash": "c4c6a5a18c732e84eb1ae2ac1a2d19054bf7e23a361796e999abb6f4606b7350"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_writer.py_write_nifti.if_not_isinstance_data_n_write_nifti.output_spatial_shape_.list_output_spatial_shape": {"doc_hash": "c647371532c7370af86690c884044811f98cc88b48116f02b8c047a104c555b8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/samplers.py_from_typing_import_Option_DistributedSampler.__init__.if_not_even_divisible_.self.total_size.data_len": {"doc_hash": "98f5a782a87fd28ea0074eef230eb0fc7be54c7987da1ec48a57f03e04d64447"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/samplers.py_DistributedWeightedRandomSampler_": {"doc_hash": "c2cefcbcdff700fd0f1c32f07e61e20c3fa76baba85d21b79033aa92f1c8aee4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/test_time_augmentation.py_from_typing_import_TYPE_C___all__._TestTimeAugmentation_": {"doc_hash": "c7e387baf8ea9c024ef728f9bede94d6ebe68e6959ed0a789f7c9f60474e3015"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/test_time_augmentation.py_TestTimeAugmentation_TestTimeAugmentation._": {"doc_hash": "119c84aed17cdfb41b6ad8b082cd89abbd8f1cdcdd30235e7df66f079754e5d4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/test_time_augmentation.py_TestTimeAugmentation.__init___TestTimeAugmentation.__init__.self__check_transforms_": {"doc_hash": "15de8af80634fa6d765f49cb75bdd209470ce08d9744453c747f8bb46d78fb56"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/test_time_augmentation.py_TestTimeAugmentation._check_transforms_TestTimeAugmentation._check_transforms.for_r_i_in_zip_randoms_.if_r_and_not_i_.raise_RuntimeError_": 
{"doc_hash": "5b7354c2499a78258cfc5690487c392b953edd0431384d164780ad6d8a8140a3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/test_time_augmentation.py_TestTimeAugmentation.__call___": {"doc_hash": "936a63c4f8879d5eaf347ec42aa5a7cf685e7240be3609c15891a2cd03e38f81"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/thread_buffer.py_from_queue_import_Empty__ThreadBuffer.__iter__.try_.finally_._ensure_thread_completio": {"doc_hash": "a1c607155634ad67ce6233481dc048775ca0377df8b07da1e801025dd6da69b1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/thread_buffer.py_ThreadDataLoader_": {"doc_hash": "25923978a95aa90d1481161d43addfbde0eeee63341219b3d30df315d0d039e9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_hashlib___all__._": {"doc_hash": "b323a5a7000ff8d8338dc9ae12fa2d3822754d6579659127ba2095ab43ff9697"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_dense_patch_slices_dense_patch_slices.return._tuple_slice_s_s_patch": {"doc_hash": "9aa64f8acb5a6b93d1993de2975a6a0a1e60bec1ef59ea9a6604eff8aa49b57a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_list_data_collate_list_data_collate.try_.except_TypeError_as_re_.raise_TypeError_re_str_": {"doc_hash": "732716c0da83c43e483ba018c586109f6da6b2e94fa4c7007017f25b9d4250e7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_decollate_batch_decollate_batch.torch_to_single.return.d_if_d_numel_1_else_d": {"doc_hash": "cd7a976b8e814b2f525004ef3c4cbc757eafeb5ba8165a4c6bfc08e5c7a2271b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_decollate_batch.decollate_decollate_batch.return._key_decollate_data_key": {"doc_hash": "66fa58b664e1cba522a9249646c0dc7a3c05a99d7d8618b893d63ff22744250e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_pad_list_data_collate_pad_list_data_collate.return.PadListDataCollate_method": {"doc_hash": "5c505444d087bdd2b842abbba8ee42e4778e202228a8c6e350d3cef9dd07b49c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_no_collation_set_rnd.return.seed": {"doc_hash": "36f4ea89b3064f57fae7ae81b446e9182b05c2e2aaf96b051329a3bc52da0495"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_create_file_basename_create_file_basename.return.os_path_abspath_output_": {"doc_hash": "da4871cadbd9419844deeeea46e535fa2926f87b8b893c6a6da868caf125af08"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_is_supported_format_is_supported_format.return.True": {"doc_hash": "78a0f48f00cb1662707efca2f9bfdda2c07ef986660f7b53cf61ae3fba1e7d6f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_partition_dataset_partition_dataset.if_data_len_num_partiti.raise_RuntimeError_f_ther": {"doc_hash": "b3f465d7feb220b7afd918dbedd13a005fa3c480ab8c95cc91dd8ef9a415dcbb"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_partition_dataset.if_drop_last_and_data_len_partition_dataset.return.datasets": {"doc_hash": "699290a877eafd725341605f9472d6003b19124cc9ab6c211419e9fd2386ac1e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_partition_dataset_classes_partition_dataset_classes.return.datasets": {"doc_hash": "2a6a0d3f41e9f08c183a5a364696329f8a9637b4f1b10d1d12d09182b1171151"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_select_cross_validation_folds_select_cross_validation_folds.return._data_item_for_fold_id_in": {"doc_hash": "7e4feb2d7d5808306cdece5b3662e2b6432b0d6a709cf7eec86e0cf14384fbfa"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_json_hashing_": {"doc_hash": "a9dda6994f12bd84460d376be08901557fef52ae210c8fc5e86e7b8dbf69f481"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/__init__.py_EnsembleEvaluator_": {"doc_hash": "1b6d18f70bb17fc6046905f68d3da9c831ce2d6cc1ca6a03f519d82a0bc0d638"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/evaluator.py_from_typing_import_TYPE_C___all__._Evaluator_Supervised": {"doc_hash": "e49f4d06da8184f9d1a4f96d1f4dc26d8d939c2aa9199897e0e110d1a20eacd3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/evaluator.py_Evaluator_Evaluator._": {"doc_hash": "83dcded1c5aecf6f9fe9d7e2bd2728ccebabe27ddbbbbcb8090a62f9ac2a9be0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/evaluator.py_Evaluator.__init___Evaluator.get_validation_stats.return._best_validation_metric_": {"doc_hash": "d8f14a93c37c2a701ff619860d2ec3377021d9e1b2b2cde7e7cc407ff821b73f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/evaluator.py_SupervisedEvaluator_SupervisedEvaluator._": {"doc_hash": "ce783d8fe62c6c51b5fc4650e2d6d21ca0af979c8b251599cb9aa46cc3ba0ae3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/evaluator.py_SupervisedEvaluator.__init___SupervisedEvaluator.__init__.self.inferer.SimpleInferer_if_infere": {"doc_hash": "762b84e917c6adcaa9d7bd165030e50c46a8a0a0fa00f38d2c5cc11113e30656"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/evaluator.py_SupervisedEvaluator._iteration_SupervisedEvaluator._iteration.return.engine_state_output": {"doc_hash": "c2dee47c7b8e445cc32e23b966621136228a0030b42867e0d3c62d13fea08fde"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/evaluator.py_EnsembleEvaluator_EnsembleEvaluator._": {"doc_hash": "ef1fcab2ec3619d8b014a36278d3993f6cfc6dcaf29ecdcf352ea4beaaac0997"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/evaluator.py_EnsembleEvaluator.__init___EnsembleEvaluator.__init__.self.inferer.SimpleInferer_if_infere": {"doc_hash": "3960ccb35d1ed1b57fa34f39b1e5606d9af5d1474abc4aed3c3d2ac2d8f23db7"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_from_typing_import_TYPE_C___all__._Trainer_SupervisedTr": {"doc_hash": "18c85aa8b87b459023f2eca334ec075581f0777a7197f6860defb46fe0a6c70a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_SupervisedTrainer_SupervisedTrainer._": {"doc_hash": "b38cb8f3f8a83258cfc292649f52d14f77cd79af3c5701aa7c92c79aaf82d1d1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_SupervisedTrainer.__init___SupervisedTrainer.__init__.self.inferer.SimpleInferer_if_infere": {"doc_hash": "a00ee54513d14596a191c2553da154b4e3d3a29ffceba7546f97d4a87f45a6e2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_SupervisedTrainer._iteration_SupervisedTrainer._iteration.return.engine_state_output": {"doc_hash": "3f82764eba7682406aaafbdcbf8504d1af9cd48c2e6267a5d008662ed1c485a1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/utils.py_from_typing_import_TYPE_C___all__._": {"doc_hash": "1b4c19c1c705dbe8fae692407810cbe5bc35da9eeb9167479661a112c0635e25"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/utils.py_IterationEvents_GanKeys.DLOSS._d_loss_": {"doc_hash": "c84381bda3daf6359bb6aec33d7b694cc1161f7fa2cbec28bc53dd205f3fbaed"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/workflow.py_from_typing_import_TYPE_C_if_TYPE_CHECKING_.else_.EventEnum___optional_i": {"doc_hash": "ca8ee83f1cc44f9d15c35655da1d3b1f3fd3c5eb9b1fb20970c42a2484fb367e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/workflow.py_Workflow.__init___Workflow._register_post_transforms.run_post_transform.engine.state.output.apply_transform_posttrans": {"doc_hash": "7adcf2cfddb262152e5aa264690b20af3a0130d011593928c91007b4fa14cca4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/workflow.py_Workflow._register_metrics_Workflow._register_metrics._compare_metrics.if_engine_state_key_metri.if_current_val_metric_e.engine.state.best_metric_epoch.engine_state_epoch": {"doc_hash": "7f9f82df39a7c38acfe6cd075379997bc56f3a97c17ceb44523f010d58be7bf8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/workflow.py_Workflow._register_handlers_": {"doc_hash": "7ac66d56b1223fe724566136dbc00cd90ec57f4c8e13e42446c05a1a159a2c55"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_logging_if_TYPE_CHECKING_.else_.DiskSaver___optional_i": {"doc_hash": "91e218eee76d8ceba56cd0ece020f405bfd474ae189209d311724a6771cc6cd6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver.__init___CheckpointSaver.__init__.if_save_interval_0_.self._interval_checkpoint.Checkpoint_": {"doc_hash": "3a6a921d2ad38ebeb182ea68742b045a6e14cfa8cfc2f21f9a92623be61b3391"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver.load_state_dict_CheckpointSaver.load_state_dict.if_self__key_metric_check.else_.warnings_warn_no_key_met": {"doc_hash": "31477a69477fea982b5a0d68eed1f1d51d7ac4a2a8781f966566b671500c4b1a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver._delete_previous_final_ckpt_CheckpointSaver.completed.self_logger_info_f_Train_": {"doc_hash": "a876bcb71e5664a1192e4590db2096d35718810994862fcf54203cce1ca0e8a0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/classification_saver.py_logging_if_TYPE_CHECKING_.else_.Engine___optional_impo": {"doc_hash": "52c3b9dbbdf5fbc1b977e1d76212f60b61cc5e06d8c776a686aa60d05ac8a893"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/classification_saver.py_ClassificationSaver_ClassificationSaver.__init__.self._name.name": {"doc_hash": "17bb8dd1e9af4c1c49c8ae7a3a1a0a58ea55a0a3a2f9fcc9bc101251d0758f4a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/classification_saver.py_ClassificationSaver.attach_ClassificationSaver.attach.if_self__expected_rank_an.engine_add_event_handler_": {"doc_hash": "c5cce4e097f19ad92558be74a8bf224b3b697c67daacdd6c3dd33df343870105"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/classification_saver.py_ClassificationSaver.__call___": {"doc_hash": "acf4593d49c2a8fbbb62602672b35613bdee8c8cf96c403ab50a146a3e418415"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/confusion_matrix.py_from_typing_import_Any_C_": {"doc_hash": "60b11ea501f27081d75c187674e01677df8d2c32241fc21cb4900badc3e72b3e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/earlystop_handler.py_from_typing_import_TYPE_C_if_TYPE_CHECKING_.else_.Engine___optional_impo": {"doc_hash": "0a645a564fc2474baa39cafb2ba9158247fca2d97a327f83afd252e850a27021"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/earlystop_handler.py_EarlyStopHandler_": {"doc_hash": "7615b3fe7cff3f5ea98e5e59da56e70455c24c14ccba4c41bd74fd5978e59efb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/garbage_collector.py_gc_GarbageCollector.attach.if_not_engine_has_event_h.engine_add_event_handler_": {"doc_hash": "4b17bb82477e01b0819cc2fe7420956e6344cc8cbb8db5105f5570866125985b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/garbage_collector.py_GarbageCollector.__call___": {"doc_hash": "02c21c4f63878e2b9ac3695cd81fb47790fe423eea371a8623b134881b83ddcb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/hausdorff_distance.py_from_typing_import_Callab_": {"doc_hash": "0f6579b3154419a6f4462855ee9a08c593ee5bc31efe8d641bb2fc9bc29ab33c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/iteration_metric.py_from_typing_import_TYPE_C_if_TYPE_CHECKING_.else_.Engine___optional_impo": {"doc_hash": "625637222294900fe04267fdf239ba10b166940c9a07cec75fe064f1fe62f125"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/iteration_metric.py_IterationMetric_IterationMetric.reset.self._scores._": {"doc_hash": "1af0ff7453322a129732322abe74ea305f8d6a1929766ecdafb440a7f892c6b0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/iteration_metric.py_IterationMetric.update_IterationMetric.update.self__scores_append_score": {"doc_hash": "105164406b075ae00308514db5cfaa090740c49524c91eebf2715bbd334b2e65"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/iteration_metric.py_IterationMetric.compute_IterationMetric.compute.return.result_item_if_isinstan": {"doc_hash": "783e6bfb9723a89be52424ae157a0e631e5fc81541965a6f1abae06a77b701bd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/iteration_metric.py_IterationMetric._reduce_": {"doc_hash": "d88d804e793eb0e15ff5e8037993e6899e586a203f5b90d0befd5da788fa22c5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/mean_dice.py_from_typing_import_Callab_": {"doc_hash": "ab15f0218165c100a872f8ffc65b9928a84974d61b554ae76324e327c063ef74"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/metric_logger.py_from_collections_import_d_MetricLoggerKeys.LOSS._Loss_": {"doc_hash": "d28321706028217eb08a6442748bf05382df3a1adab5110e521879fd6f21da8f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/metric_logger.py_MetricLogger_": {"doc_hash": "9309565f550c1c15e92759cd68a1aac616a914cfbbb7d5720d59874a46eb260c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/metrics_saver.py_from_typing_import_TYPE_C_if_TYPE_CHECKING_.else_.Engine___optional_impo": {"doc_hash": "bed4e260b88f547ba98d76552738139af95befc24a1a326df095fc93e7feb3d9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/metrics_saver.py_MetricsSaver_MetricsSaver.__init__.self._filenames._": {"doc_hash": "c0f3348f7c487c437a4cdf20c3f52cd867443b8263b21ee81c2ea167fe66839b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/metrics_saver.py_MetricsSaver.attach_MetricsSaver._get_filenames.if_self_metric_details_is.self__filenames__filen": {"doc_hash": "db0091732f2144a23ef767d89d0de0cbf7945079a5fd3efdaab56a40e306a4a3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/metrics_saver.py_MetricsSaver.__call___": {"doc_hash": "7c6fab80c281bcbe03ac17842eb5737511fe13ec785ebcf9d04e5ee692b59a5e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/parameter_scheduler.py_logging_ParamSchedulerHandler.attach.engine_add_event_handler_": {"doc_hash": "c2beb039cb5f0b21db76a3120ee3af521ec442a2710b056863577c3301fa9445"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/parameter_scheduler.py_ParamSchedulerHandler._linear_ParamSchedulerHandler._linear.return.initial_value_delta": {"doc_hash": "6edd3943f38226dd63eef6c66840439b2f087e06ea7042f7d8b15ac888964946"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/parameter_scheduler.py_ParamSchedulerHandler._exponential_ParamSchedulerHandler._exponential.return.initial_value_gamma_": {"doc_hash": "163ee21bd25a7c6650503a507f9a43af6edf9cad7630153b9ab6135712659711"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/parameter_scheduler.py_ParamSchedulerHandler._step_ParamSchedulerHandler._step.return.initial_value_gamma_": {"doc_hash": "e9d44bb37177f355a9960e9e7c6d2904526bfd79e0c37bebdb9dd30d7b7d7a4f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/parameter_scheduler.py_ParamSchedulerHandler._multistep_": {"doc_hash": "138549fbf21e521086f37cce01dfa97d64cf879ddc5f63cbf13546640ca27078"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/roc_auc.py_from_typing_import_Any_C_ROCAUC.__init__.super___init___": {"doc_hash": "533ab4e28c31ae16e6b8faa62691a12c56945b0177fbba0aa315c4d514bd477e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/surface_distance.py_from_typing_import_Callab_": {"doc_hash": "f3f44597043878fde1e0c37985a728d64a067c6ca837f7151eee17f316f3c55d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardHandler_TensorBoardHandler.close.if_self_internal_writer_.self__writer_close_": {"doc_hash": "41deed307049397689582e3e15377e96f59f7f12579a827d3bdfb8ff26488d4b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/transform_inverter.py_warnings_if_TYPE_CHECKING_.else_.Engine___optional_impo": {"doc_hash": "f65c83412267a0379e2be2b80e164e71ed508bc00450a0cf8091e0f45848e38e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/transform_inverter.py_TransformInverter_TransformInverter.__init__.self._totensor.ToTensor_": {"doc_hash": "73d6a5fc59721b2e9f22435d39a5ab4dc80cefd81670b3e5076468e96d6bfb7f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/transform_inverter.py_TransformInverter.attach_": {"doc_hash": "c14dc8025456aa87ffc26dad089397e24988e3070f62fc7e2e9ea887d171a42e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/utils.py_os_stopping_fn_from_loss.return.stopping_fn": {"doc_hash": "92d20725f0fb8461d38551fb77ff6221aba68d815b8c71bb1e848284df3ba299"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/utils.py_evenly_divisible_all_gather_evenly_divisible_all_gather.return.torch_cat_data_i_max_l": {"doc_hash": "00d2fa90e600a8185b735e1cbde080f99319072a61a4405137ac45c2c85dba28"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/utils.py_string_list_all_gather_string_list_all_gather.return._i_for_k_in_result_for_i_": {"doc_hash": "ffe90368fcb97b484c468dde053c9d09441436282620804e9a2d4aef6f7b1e5f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/utils.py_write_metrics_reports_write_metrics_reports.if_metrics_is_not_None_an.with_open_os_path_join_sa.for_k_v_in_metrics_items.f_write_f_k_deli_str_v": {"doc_hash": 
"07957fb4862940a75ca2e29590ce4ebaf7416a19461f32e6e3dd45bb8a448d17"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/utils.py_write_metrics_reports.if_metric_details_is_not__": {"doc_hash": "41608db75e6a124aca0bbf1b4f81850ae6d5e2b6f3e358a3b2d5489ac8717053"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/__init__.py_Inferer_": {"doc_hash": "237e37b6b13e3d8780fbad6394789e67d1e0780ae44a997d51e113c1bbfa571e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/inferer.py_from_abc_import_ABC_abst_Inferer._": {"doc_hash": "1ba092a08b4252f581f88c48da3822904701ae0d8dcff40550d1a5e3af4473c5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/inferer.py_Inferer.__call___Inferer.__call__.raise_NotImplementedError": {"doc_hash": "a9469854562bc2a24d597fbf123db0eb1a2a92f845e43ee65991b6ad06366300"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/inferer.py_SimpleInferer_SimpleInferer.__call__.return.network_inputs_args_": {"doc_hash": "13fef84aa007b53f1625f979035b9a00b5e6ba45f5e4a1a958fa3b8b5307edd5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/inferer.py_SlidingWindowInferer_SlidingWindowInferer._": {"doc_hash": "dd8f08dc7ea6dbb71eecee7bf93ac7423ed865a81dc5b0ee1ea7a88b3c3a58e9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/inferer.py_SlidingWindowInferer.__init___SlidingWindowInferer.__init__.self.device.device": {"doc_hash": "e84856f32e51e8bf2df131da21c368491af1d8a473ced1f806fd1d3e425e6c96"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/inferer.py_SlidingWindowInferer.__call___SlidingWindowInferer.__call__.return.sliding_window_inference_": {"doc_hash": "a05756187d791372236ab3548ff4e15144180aaa80c7ddcd30482129fb2c79e8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/inferer.py_SaliencyInferer_SaliencyInferer.__init__.self.kwargs.kwargs": {"doc_hash": "30dfbbebcf4c2063e5ac18019b7acce6129382faff4721b84fb5a7de67702f40"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/inferer.py_SaliencyInferer.__call___": {"doc_hash": "d5044f789c40cb9d87d9035ed5c68dc884407505f3e648e86e770040cbb75e03"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/utils.py_from_typing_import_Any_C_sliding_window_inference._": {"doc_hash": "9dc7fa1f2b9ecc12ff153a6f9365d9abdbec3a6f4086ccba3bbf5bc0b1fc267f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/utils.py_sliding_window_inference.num_spatial_dims_sliding_window_inference._initialized.False": {"doc_hash": "08d7f01c19491913cdb9a2fd6f7e4f6b183ab5268e26b9116e8ea876858853b2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/utils.py_sliding_window_inference.for_slice_g_in_range_0_t_sliding_window_inference.return.output_image_final_slicin": {"doc_hash": "671e5276a094a23457b62e2e65b61aa06d816000a49fdb5d7cb3176427c921f2"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/__init__.py_BendingEnergyLoss_": {"doc_hash": "fcd6dfb2b4a8324647af1f7aab8954c8448bd5ba341989c4a30d595f885ae7b7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/deform.py_from_typing_import_Union_spatial_gradient.return._x_slicing_s_x_slicing": {"doc_hash": "9585c71e39601d49be12df6ef8733777bca5c9ded9ee7d78d1daefc3cb1251ca"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/deform.py_BendingEnergyLoss_BendingEnergyLoss.__init__.super_BendingEnergyLoss_": {"doc_hash": "bc52bcd853d6daf88eec6b3acabd38463ed5add56ee9beb6ff16014b2a3411ed"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/deform.py_BendingEnergyLoss.forward_": {"doc_hash": "8fcd2e2fab8cf61b4a76f60a83191f43cb5cd47f3a3bbc0243dd8ce2607baae1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_DiceLoss.__init___DiceLoss.__init__.self.batch.batch": {"doc_hash": "fddc96284c539571724b9a3af5233149836b1d24691a8e0d417f3822cccf1dd5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedDiceLoss_GeneralizedDiceLoss.w_func.return.torch_ones_like_grnd_": {"doc_hash": "72f87a346f5e3bc7e45c13929d54cc79d010a0839e0b57323696e00fec283f1f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedWassersteinDiceLoss_GeneralizedWassersteinDiceLoss._": {"doc_hash": "76e59f493431d181becc4549edd85c6f500ead37f9e6be62fdefa88c05149351"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedWassersteinDiceLoss.__init___GeneralizedWassersteinDiceLoss.__init__.self.smooth_dr.float_smooth_dr_": {"doc_hash": "d0f819f269b9375e62e9861a281d3704fa8235f93b7e58607b310389c08ea45c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedWassersteinDiceLoss.forward_GeneralizedWassersteinDiceLoss.forward.return.wass_dice_loss": {"doc_hash": "c5a852cb720d015f40b84739c5c868ab42467b4aa07e7c58776e9b5ebbce88cd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedWassersteinDiceLoss._compute_generalized_true_positive_GeneralizedWassersteinDiceLoss._compute_generalized_true_positive.return.generalized_true_pos": {"doc_hash": "2ad55dc4f24ad8687e6d7dfbf0cdeef611f1077a664fde533e9c8e6cc5c81f55"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedWassersteinDiceLoss._compute_denominator_GeneralizedWassersteinDiceLoss._compute_denominator.return.generalized_true_pos": {"doc_hash": "3e0062fa978d046fcb760a0b742025f9bb7a1918f4952455dcda402ccc1103dc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedWassersteinDiceLoss._compute_alpha_generalized_true_positives_GeneralizedWassersteinDiceLoss._compute_alpha_generalized_true_positives.return.alpha": {"doc_hash": "baffe2d428837a654f87b6adbecb51b415926acb301d8cb9cf49915a3c904638"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_DiceCELoss_DiceCELoss.__init__.self.lambda_ce.lambda_ce": {"doc_hash": "beda407078b0bf650b59ba4b3f15ff5c9226c661fda74a89f8f62fc9d6b52d58"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_DiceCELoss.forward_DiceCELoss.forward.return.total_loss": {"doc_hash": "429f595f2ed834a5447bc9235620964cdc44ca55d2cfb89d17aba9732dd40200"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_DiceFocalLoss_DiceFocalLoss.__init__.self.lambda_focal.lambda_focal": {"doc_hash": "9ef43d1c3f297bd762c31ada63e9226d53b1e2043418b09bd97cf2e78dbdcead"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_DiceFocalLoss.forward_": {"doc_hash": "ff9f5cd01940fa571d7930e15d15387496631611a58bc7d1acc0a78ca81df73d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/focal_loss.py_warnings_FocalLoss.__init__.self.weight.weight": {"doc_hash": "923ca567958039029bb3a0f3f34d905c5da9db521df56d0b7b6dd234d462f626"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/focal_loss.py_FocalLoss.forward_": {"doc_hash": "5906792a7927cd5447b04ed60d05b25676a0dd9730d9ed8121bded891e685e31"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/image_dissimilarity.py_from_typing_import_Tuple__kernel_dict._": {"doc_hash": "27ee4e8ae754967e37dfa9939d38a67e03cb9afbcf843cc8e77a1020bb105107"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/image_dissimilarity.py_LocalNormalizedCrossCorrelationLoss_LocalNormalizedCrossCorrelationLoss.get_kernel_vol.return.torch_sum_vol_": {"doc_hash": "3918236aa2f2d06d6431165f9a6eee043985870053123061c2f93069150e1775"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/image_dissimilarity.py_LocalNormalizedCrossCorrelationLoss.forward_LocalNormalizedCrossCorrelationLoss.forward.raise_ValueError_f_Unsupp": {"doc_hash": "b8d118eaaf0889668d7e07eb4381591fc93e14a0c90df1b04726e2a84d0848e5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/image_dissimilarity.py_GlobalMutualInformationLoss_GlobalMutualInformationLoss.__init__.self.smooth_dr.float_smooth_dr_": {"doc_hash": "9cba9f6afcd940258d31b90314ff414c748f66663d8e9d21bc95c6749e94c5d7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/image_dissimilarity.py_GlobalMutualInformationLoss.parzen_windowing_GlobalMutualInformationLoss.parzen_windowing.return.weight_probability": {"doc_hash": "2d240ddc750542fa9cd7da5a9153f7bf9d88cc92d93a597bd310815c7f59bb20"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/image_dissimilarity.py_GlobalMutualInformationLoss.forward_": {"doc_hash": "76f4af1563b59d91830a3460cd60b3f1afb8f14ddedf03ffe87e793c7582edc1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/multi_scale.py_from_typing_import_List__kernel_fn_dict._": {"doc_hash": "453aa4a629eec78c9a39de714b86cf95584b5808d167c6a700bf4f712eac900d"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/multi_scale.py_MultiScaleLoss_MultiScaleLoss.__init__.self.scales.scales": {"doc_hash": "63d6e11a69762dc8d2b9409aaeb0e7201715ce9977eba38e71203c4691e55d44"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/multi_scale.py_MultiScaleLoss.forward_": {"doc_hash": "699a789071547f73ccfdca4a393698deac4fe69d764637c989a628a68fdc3b56"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/tversky.py_warnings_TverskyLoss.__init__.self.batch.batch": {"doc_hash": "104d80af7c8b092c4d51688ebc24949d0afcfcbda122f0926191ac7a47f6cf76"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/__init__.py_ConfusionMatrixMetric_": {"doc_hash": "2f98d5d0509b2a533b98d9de69301699e291480e825dfc1c140bf408e6ce7080"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/confusion_matrix.py_warnings_ConfusionMatrixMetric.__init__.self.reduction.reduction": {"doc_hash": "f66481bd99f0174c865a68044b77973a3e8064b9ef7a7eff1f60df8e0c13f789"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/confusion_matrix.py_ConfusionMatrixMetric.__call___ConfusionMatrixMetric.__call__.return.confusion_matrix": {"doc_hash": "70dbe398f90cf635c971e5f1ed9ecd2ddfa465887e54fedc5cd8ac67dea2640a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/confusion_matrix.py_get_confusion_matrix_get_confusion_matrix.return.torch_stack_tp_fp_tn_": {"doc_hash": "64e96308c9908e13d09c9a7e42c757a2bb132c1fa609cf1f8337ba2af5fa6aef"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/confusion_matrix.py_compute_confusion_matrix_metric_compute_confusion_matrix_metric.nan_tensor.torch_tensor_float_nan_": {"doc_hash": "b2b811860d71413b6a889813fd7fa51cfaffc1de6283da4dc231d9714e41670a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/confusion_matrix.py_compute_confusion_matrix_metric.if_metric_tpr__compute_confusion_matrix_metric.return.numerator_denominator": {"doc_hash": "a6e93a7d7741c9fbacdfd03f5141fa8723c35cf3f76066a7f360fe76d420b350"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/confusion_matrix.py_check_confusion_matrix_metric_name_": {"doc_hash": "fcc156fbb857f9e0939b087a66866b8a483ce4877ca83a6e8f154c7f3a43028b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/froc.py_from_typing_import_List__compute_fp_tp_probs.return.fp_probs_tp_probs_num_t": {"doc_hash": "749e651c07ad000a45d13277984de51b7180cd92ce879646b536182cfad126ba"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/froc.py_compute_froc_curve_data_compute_froc_curve_data.return.fps_per_image_total_sens": {"doc_hash": "6ae9187bd01112a2bcd30afbc047fa70e6eb8ed23509879424385829dfd6e9a0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/froc.py_compute_froc_score_": {"doc_hash": "4169847d69842fdba1388925f640d3d5a9cefbfe4f2d3b08c0b5d0d546ce4813"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/hausdorff_distance.py_warnings_HausdorffDistanceMetric.__init__.self.reduction.reduction": {"doc_hash": "4d4f1b4cbc9a8abf9b1f022486127c1b1ab23504a429896794bbde7853c54ad3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/hausdorff_distance.py_HausdorffDistanceMetric.__call___HausdorffDistanceMetric.__call__.return.f_not_nans": {"doc_hash": "40d064bfb37ea9ad3064b5f310dd2a12d8852c92d766ddb2fc37f7eb825c4c87"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/hausdorff_distance.py_compute_hausdorff_distance_compute_hausdorff_distance.return.torch_from_numpy_hd_": {"doc_hash": "473fb388c966902b75f641c59e13ed8b299bbd739a254c1050955d1f2db0d100"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/meandice.py_warnings_DiceMetric.__init__.self.reduction.reduction": {"doc_hash": "da00913b0433f7f4c124c215dae7cc310b88413cd55ed651205d305bbe462735"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/meandice.py_DiceMetric.__call___DiceMetric.__call__.return.f_not_nans": {"doc_hash": "c0594cd40fee7d0338fc314363a42030ff8772cf35732cc249b64a9e59a17511"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/meandice.py_compute_meandice_": {"doc_hash": "07bc70871faccac56abcdc0e7e5bb48a639e5a25df926effa244958e54e170d1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/rocauc.py_from_typing_import_Union___calculate.return.auc_nneg_n_nneg_": {"doc_hash": "3223d5041eeba9af5fad677a634b73c9be3cd03a2ad58db63061ec5b816f3908"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/rocauc.py_compute_roc_auc_": {"doc_hash": "bc01e1ed5352711887d29fe10b8912c1f4d2c9938c865427df7240890d40952b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/surface_distance.py_warnings_SurfaceDistanceMetric.__init__.self.reduction.reduction": {"doc_hash": "1ab02a30d7201f1228e5823e6525d3609277c616228a84cc16f7cc9aa2d76a20"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/surface_distance.py_SurfaceDistanceMetric.__call___SurfaceDistanceMetric.__call__.return.f_not_nans": {"doc_hash": "ec76f9455b1f26505da4ea5175eab78cc2a80981badf6f44cdfe53ecdf2c9bf4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/surface_distance.py_compute_average_surface_distance_": {"doc_hash": "53c712014e00d7eb951e630a0c84d27454d95542d6dcbf6234f13a9ab1e86ac2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/utils.py_from_typing_import_Tuple____all__._ignore_background_do": {"doc_hash": "2166a930c3c00063c61ec90c811137b206c9e73d42c2401fe5d52104d05ecec3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/utils.py_ignore_background_ignore_background.return.y_pred_y": {"doc_hash": "decf2a57e03934952760323bbc9289c2bf9b4c076487b660921e46b95529c641"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/utils.py_do_metric_reduction_do_metric_reduction.return.f_not_nans": {"doc_hash": "b04d72499f6ee5639dbdad67d3d95ccb093fa45352420dfe28ce4b2577ab2e7a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/__init__.py_eval_mode_": {"doc_hash": "fb6ccc9d4ebfa8e1e155000a00eafbdc78ca442b03810945f438f59db55e7ecd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/__init__.py_ADN_": {"doc_hash": "a8dfb8bfb2a544714572aaabe99e572cc9703a2aeed1173937f3af6fc1940104"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/acti_norm.py_from_typing_import_Option_ADN._": {"doc_hash": "484f48f469c99c607ebec348d52eb392acf7ef58151ce466b357b93960d401f0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/acti_norm.py_ADN.__init___": {"doc_hash": "2eb26fef742ef81b449c276788240f90bac80a47a9166cbe5a95cc803b8d8013"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/activation.py_torch_Swish.forward.return.input_torch_sigmoid_sel": {"doc_hash": "0613cba29e1ad9a4bbd2d194db7a0fdb8a72e2d8767a323ffe70cac5c0fe77f9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/activation.py_SwishImplementation_SwishImplementation.backward.return.grad_output_sigmoid_in": {"doc_hash": "35f11877e62f493ef7b32fa298fe0f4e97406d04adc9511cf3e68fa61b495e68"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/activation.py_MemoryEfficientSwish_MemoryEfficientSwish.forward.return.SwishImplementation_apply": {"doc_hash": "be32ec0d0cd7dc821b5cac58e5dc0961b9e48ebc6d0896db667f21c4713abddc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/activation.py_Mish_": {"doc_hash": "6abbd81b0ff1e346160d96c820a96ab5d364d43b3eac8e5005d75420476af432"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/convolutions.py_Convolution.__init___Convolution.__init__.if_not_conv_only_.self_add_module_": {"doc_hash": "3d86c5c81b78f49f803809e4b1158fdfa52363bceaafe54ff78ab9395ed6e8c9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/crf.py_torch_CRF.__init__.self.iterations.iterations": {"doc_hash": "c58292032cfdb8c0c552821bb38f7788cb45bdd5a99afafa3bc40dbd3d9e887e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/crf.py_CRF.forward_CRF.forward.return.output_tensor": {"doc_hash": "e4409c3514e27ffa112e538c663efeea284f08f0968b89522c5364b766c0d1ba"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/crf.py__helper_methods_": {"doc_hash": "244423754500a4df549c6530107b30836ec000c8ea08c9cfdf7959ddde9786e7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_get_norm_layer_get_norm_layer.return.norm": {"doc_hash": "0981417e12a2e0e56b1f0f52972cd2319558a750ef1ed556b2901af149efe146"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/localnet_block.py_from_typing_import_Option_get_conv_block.return.Convolution_": {"doc_hash": "1deb90ae2719d69653bc0f8d75d49a9812bd0f011decd54b69037f54ca264fe2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/localnet_block.py_get_conv_layer_get_deconv_block.return.Convolution_": {"doc_hash": "7b4f4b154c54fb54ab57b654fce0a6e691f0ed1055af9c9265c77a9a4a44731a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/localnet_block.py_ResidualBlock_ResidualBlock.forward.return.out": {"doc_hash": "6164b84e90724c68633231715ebb132fd29f420207b288c1204b2c482ff12c64"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/localnet_block.py_LocalNetResidualBlock_LocalNetResidualBlock.forward.return.out": {"doc_hash": "d4dc65aabec5dd677f4d2a466c47bdfc77b29f5d5bf2e1eed93af81f4e5fa1e3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/localnet_block.py_LocalNetDownSampleBlock_LocalNetDownSampleBlock.__init__.self.max_pool.Pool_Pool_MAX_spatial_di": {"doc_hash": "7adf4ad15e39999143be5b5403d92ba41fb49515942fef0fa71727b0f24efa59"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/localnet_block.py_LocalNetDownSampleBlock.forward_LocalNetDownSampleBlock.forward.return.x_mid": {"doc_hash": "0769757010b13aa7a178c2f0591e925f7dcaaaa40504acfa9148a8f820a570a0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/localnet_block.py_LocalNetUpSampleBlock_LocalNetUpSampleBlock.addictive_upsampling.return.out": {"doc_hash": "00b4792369012dc930a29d307051c7cddb5048d4f0891e1858561c279be3d0f1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/localnet_block.py_LocalNetUpSampleBlock.forward_LocalNetUpSampleBlock.forward.return.out": {"doc_hash": "352d57f44daa33e3991f97747dd5eb1f6b6bf0f8badfe07b42939583c13b6282"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/localnet_block.py_LocalNetFeatureExtractorBlock_": {"doc_hash": "94a0689a7aca371fd1eea2e892d4abec03b83630e0c79a4d5527007e3b7a06f3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/regunet_block.py_from_typing_import_List__get_conv_block.return.conv_block": {"doc_hash": "6b70a2db2b49d1a709a680fd53352a4b2bcc844062cd12b08f128afcd02e39de"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/regunet_block.py_get_conv_layer_RegistrationResidualConvBlock.__init__.self.acts.nn_ModuleList_nn_ReLU_": {"doc_hash": "986a4849d7ce62c57430489b397a6b741e396fed48c027a2811192d66d45576c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/regunet_block.py_RegistrationResidualConvBlock.forward_RegistrationResidualConvBlock.forward.return.x": {"doc_hash": "091b8754ed749ee28f85822a865d8f86028044eb78cdb91b0a008aebd9c11412"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/regunet_block.py_RegistrationDownSampleBlock_RegistrationDownSampleBlock.__init__.if_pooling_.else_.self.layer.get_conv_block_": {"doc_hash": "23603dbf9465265d9d2c977b12452f4ff26d2d7695e993a21730d9588941851f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/regunet_block.py_RegistrationDownSampleBlock.forward_get_deconv_block.return.Convolution_": {"doc_hash": "243ba5a6e4a162ab5bb04eaa45af8800d97cc6e1501b5c11d8b95a399a1c6185"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/regunet_block.py_RegistrationExtractionBlock_RegistrationExtractionBlock.__init__.self.layers.nn_ModuleList_": {"doc_hash": "32b943c07ded55b060e8c2b68823bbcafb72178570434637a503c42c0b95dc0d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/regunet_block.py_RegistrationExtractionBlock.forward_": {"doc_hash": "e3a1740f7cc214d82cfd4308a343f2c63372aa50cdce442a3ca3f8bff1687c45"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/segresnet_block.py_from_typing_import_Union_get_norm_layer.return.norm": {"doc_hash": "b21f31a59a5892c88625fcd9bb657d00c3560ec264b4b2e2524d5fafb5a0286f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_math_ChannelSELayer.__init__.self.fc.nn_Sequential_": {"doc_hash": "5e6e1e960343520e14400203bcb7ee2bdc67560c3b5462f9b45b7b01ff2ac1df"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_ChannelSELayer.forward_ChannelSELayer.forward.return.result": {"doc_hash": "11c7c86b1f309e75c2a4c324e6459f7294e3576f53fe5c63580d1ef7f15ff383"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_ResidualSELayer_ResidualSELayer.__init__.super___init___": {"doc_hash": "0a9e62d7e613bcc1a23894c105cf6ff3f75a31bbec12231d3badaebbbdee5ecd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_SEBlock_SEBlock.forward.return.x": {"doc_hash": "274bbe964a5b75d0d47711e74fc132a337ce57622ed97618831657bf5e90df50"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/warp.py_warnings_Warp.get_reference_grid.return.grid": {"doc_hash": "057af7675db1d85d1ccd1360d721adbe207b8b9adacdf55ca13ec1aaa79290ca"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/warp.py_Warp.forward_Warp.forward.return.grid_pull_": {"doc_hash": "fb0f40267c20664d795f97c4e7aee06f66dd1fe0d1626cdc92c3e6eedb5b396c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/warp.py_DVF2DDF_": {"doc_hash": "a08acce846f50adfc592d6e341903d0f6f6561b1e64b277b86a363a7a7d42033"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/__init__.py_calculate_out_shape_": {"doc_hash": "3aa34f3112edd64ddf1d4a06c18b80da1d3371af510f7dc7c59a884e07deea26"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/factories.py_split_args_split_args.return.name_obj_name_args": {"doc_hash": "651ef947c5d229a027e93ff8a05555909ff699d134ec765ea0245dc3fc58159c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/factories.py__Define_factories_for_th_convtrans_factory.return.types_dim_1_": {"doc_hash": "f6324849b7cd0cbd381cad7d903e706439bdc698e6a38c29d712dbca80b9ca94"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/factories.py_maxpooling_factory_": {"doc_hash": "564cee47bca9af8063a6285834a0bc6139e309a0cd09185af707f51096434285"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/filtering.py_torch_BilateralFilter.backward.return.grad_input_None_None_N": {"doc_hash": "8f692c5da8d0ba3a54fb3301bc6b85c9119dcd1f58d0af6029b80471efe0b5e4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/filtering.py_PHLFilter_": {"doc_hash": "20b1d08077d1ae391235c4a418ccee151c95dfe31700fb11d2e5a3955d7379e0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_math___all__._": {"doc_hash": "d9c94578bdd8820627213d6a800671093afa29025566ea22b09070f36f6d84c2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_ChannelPad_ChannelPad.forward.return.x": {"doc_hash": "4321627d1847d96e141302aafb8a47b484300d84d10df68fc4e7dac5af7ba9fe"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_SkipConnection_SkipConnection.forward.raise_NotImplementedError": {"doc_hash": "8190ddef5dfa23334b52888fe5dd2162bc34f2a5a903d79067976ce163f0081a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_Flatten_Reshape.forward.return.x_reshape_shape_": {"doc_hash": "d0701ac3e21e2197bac13ef963c47f5c2c27fe4208c2ecd98ad2d1d81ad68204"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py__separable_filtering_conv__separable_filtering_conv.return.conv_type_": {"doc_hash": "970b1dc6dedfaac6a9bc8e6123d8335ffc317eb9030eb6ee7bbdc2a8199f7dd2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_separable_filtering_separable_filtering.return._separable_filtering_conv": {"doc_hash": "d2d56882265a44e4e04ce954ace8f0c31c0aa8ab119681d2a62edef583957cad"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_SavitzkyGolayFilter_SavitzkyGolayFilter.__init__.self.coeffs.self__make_coeffs_window_": {"doc_hash": "a4a706414018190281d90cecfd18d0ba1a8b68ec8c6d876e6f1a1fb152c2a3c9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_SavitzkyGolayFilter.forward_SavitzkyGolayFilter.forward.return.separable_filtering_x_ke": {"doc_hash": "e7f5f525df70cfc45f29aa621828dc8d8081cf16b1eebf7a0242b939bcec4e54"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_SavitzkyGolayFilter._make_coeffs_SavitzkyGolayFilter._make_coeffs.return.torch_lstsq_y_a_solutio": {"doc_hash": "9a7cdf35a0db0ed5eae3d90bed874c5530680c206a428560c076478b2fdd75dd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_HilbertTransform_HilbertTransform.__init__.self.n.n": {"doc_hash": "8a953c67a61a9ee43fb101779102054571730ae5c2712fc45ebb6bca446585c0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_HilbertTransform.forward_HilbertTransform.forward.return.torch_as_tensor_ht_devic": {"doc_hash": "353973f792d6e25055501610195fc9e07b20bda3574f2a5d047923be178d193d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_from_typing_import_Option__GridPull.backward.if_ctx_needs_input_grad_1.return.None_grads_0_None_Non": {"doc_hash": "546732c949ee5b8a182e6daa75c2809d4ee191f7cc99c56ddfc40b6bf722cd2a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py__GridPush__GridPush.backward.if_ctx_needs_input_grad_1.return.None_grads_0_None_Non": {"doc_hash": "4f5c3c53de5829a956252df1cc9b5ffeb5170f6494ba7558a307c8ee8ddfcb02"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_grid_push_grid_push._Convert_parameters": {"doc_hash": "a83ded6679d26f9b4a14bdd97e43041cabc646e82e7f9038af1a4d472af52e42"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_grid_push.bound_grid_push.return._GridPush_apply_input_gr": {"doc_hash": "2accff9ccaec5ec30880cd645ed27d6309e4fcdb0cfb620257749a0acffa4fa8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py__GridCount__GridCount.backward.return.None_None_None_None_N": {"doc_hash": "9253c7926b92d55eebdb0eed741970bc421891270aacbd9b86d2799979320025"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py__GridGrad__GridGrad.backward.if_ctx_needs_input_grad_1.return.None_grads_0_None_Non": {"doc_hash": "2ddd10e40748af04c4444504afa4716dd90c852da0778322f8be8e4ba837f39b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_AffineTransform.forward_AffineTransform.forward.src_size.tuple_src_shape_": {"doc_hash": "6aac4cc0b91ae9940ced1a9193a69746ebc8198dd96a67ff6ebea7807d44c9d2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_AffineTransform.forward.dst_size_": {"doc_hash": "85e507181fd035f87388b165fb721dced12e80d35e04ffde0a08175cd12c08ad"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/__init__.py_AHnet_": {"doc_hash": "161104fa16275249e14e167de2647c784792865586791f025c0aa34a30731439"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_PSP_PSP.__init__.if_self_upsample_mode_.for_i_in_range_psp_block_.self_up_modules_append_": {"doc_hash": "ca1ad11ce8cd00fbb962da76ca8a5d3e8ebd70e2e9f20cc123b4845c35297639"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet.copy_from_AHNet.copy_from.for_i_in_range_1_5_.for_m1_m2_in_zip_layer_2.if_isinstance_m1_self_no.copy_bn_param_m1_m2_": {"doc_hash": "f016fc751565a85257939208b749ecf3223e27b7e96205e211d0eb2bce9ea713"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_copy_conv_param_": {"doc_hash": "ba7438b32ef31580afc4a355c500164cba87e9044c282de29f92b7081ab7f950"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/autoencoder.py_from_typing_import_Any_O_AutoEncoder._get_encode_module.return.encode_layer_channels": {"doc_hash": "34ed44094c49f176a99910c52d6f7cf5e5c56a0b18ee13fca9b49d9960f86f3d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/autoencoder.py_AutoEncoder._get_intermediate_module_AutoEncoder._get_intermediate_module.return.intermediate_layer_chann": {"doc_hash": "29c1b3777830f476813985994afb1eaca208cf5c55cceabacc6beaf54ef096d1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/autoencoder.py_AutoEncoder._get_decode_module_AutoEncoder._get_decode_module.return.decode_layer_channels": {"doc_hash": "c807f8bc97552c59d3890915d7ecf8622797d038f2387470f325a6754a95a2cd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/autoencoder.py_AutoEncoder._get_encode_layer_AutoEncoder._get_encode_layer.return.Convolution_": {"doc_hash": "6bd57cb97c41a13b98621f8e398551d81a949d0ecd3f1b4805f1738888b73371"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/autoencoder.py_AutoEncoder._get_decode_layer_": {"doc_hash": "f870d59e7435f6d9e89a540a5d7e6999a8a37a5f7e5af5a66ff5caaad4133b57"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/basic_unet.py_from_typing_import_Sequen_TwoConv.__init__.self_add_module_conv_1_": {"doc_hash": "260fab7a59fc1c01b1ee0611264189b563790cd5302be2fc1e94af01085a4c00"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/basic_unet.py_Down_Down.__init__.self_add_module_convs_": {"doc_hash": "7a56e2bed9a7c881e5b4d0e849edbd75e71f61f8d8da8cf6b38be9b5ae1f1a1a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/basic_unet.py_UpCat_UpCat.__init__.self.convs.TwoConv_dim_cat_chns_u": {"doc_hash": "95cdb7706d3a39f41af0b0355136161a8076fe09f5412b3103311ff9f36d99f6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/basic_unet.py_UpCat.forward_UpCat.forward.return.x": {"doc_hash": "a04729e1536a85d3f318c18c6653a0f457691a565a99de27f9dbc4f61f8ae0bc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/basic_unet.py_BasicUNet_BasicUNet.__init__.self.final_conv.Conv_conv_dimensions_": {"doc_hash": 
"7853c90e1bef8eb777c27fb27929d86f1b4331efbd00e4cf0543da2f5301a58b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/basic_unet.py_BasicUNet.forward_": {"doc_hash": "35fdf34c2179c5f3885be77f80df8f30d7ccdf8f6d4b3c23029f464112423a57"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py__load_state_dict__load_state_dict.model_load_state_dict_mod": {"doc_hash": "186566dbd9bd97c625c666451553b3c7f5900797ca7ca5c6dc54ba7329656ef8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_DenseNet121_DenseNet121.__init__.if_pretrained_._load_state_dict_self_d": {"doc_hash": "803da160ad1f09fd31910e242ba5196b67d259384b3b475c2276f5fe4c93fc0b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_DenseNet169_DenseNet169.__init__.if_pretrained_._load_state_dict_self_d": {"doc_hash": "3b27ff3648f5478bc75703bd322f1d398866e6d75f0d3b0722e3436bed8887a4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_DenseNet201_DenseNet201.__init__.if_pretrained_._load_state_dict_self_d": {"doc_hash": "21e853d6a86f188eaf32e73149a67008b7063e3b7cdfb0799c4387e9e5ae0f85"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_DenseNet264_": {"doc_hash": "1eb40f312c8957b944bc3765ce74129d59a830e64d29c1965577fd68ff1dfeb3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/dynunet.py_from_typing_import_List__DynUNetSkipLayer.forward.return.upout": {"doc_hash": "9cdbbb6149b099ed34d79bec92777e791c9deaad24beacca8527a042c5d16960"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/dynunet.py_DynUNet_DynUNet._": {"doc_hash": "9564c1b9675aad901c029194f50794d1ea23bef47b88eff40cd75cbfbe68cd85"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/dynunet.py_DynUNet.__init___DynUNet.__init__.self.skip_layers.create_skips_": {"doc_hash": "1e5d30c8e941f3055d4b533065cd9a72c5111bdc53c30d62895d30d91be236d3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/dynunet.py_DynUNet.check_kernel_stride_DynUNet.check_kernel_stride.for_idx_in_range_len_kern.if_not_isinstance_stride_.if_len_stride_self_sp.raise_AssertionError_erro": {"doc_hash": "25d05f985dc6dfc899f38772969be6c03a37a92de4b1d82947768a7d39af4217"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/dynunet.py_DynUNet.check_deep_supr_num_DynUNet.get_upsamples.return.self_get_module_list_inp_": {"doc_hash": "087d2ec41f93c4ce65a57b491003cc2d30dd88bdb1c37dc79a92886b4eeca546"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py_math_efficientnet_params._": {"doc_hash": "da2048899726c7725be7adce64103ed9b2cf2e4ab9edfda89e42ea9ab46bb881"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py_MBConvBlock_MBConvBlock.__init__.self._swish.Act_memswish_": {"doc_hash": "0a148341683f0fc3f37b711580d69f8c48a8b16b2afee1ec44394bdbd58e69f7"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py_MBConvBlock.forward_MBConvBlock.set_swish.self._swish.Act_memswish_if_memo": {"doc_hash": "470824d803befeb173168e762f8ed1d49df355a59940290aa8664a078e368630"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py_EfficientNet_EfficientNet.__init__._block_index_counter": {"doc_hash": "207d8d7fb438628760dd1d685f31853469a80eb8252b96035ec1be16c93b0cba"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py_EfficientNet.__init__.for_block_args_in_self__b_EfficientNet.__init__.self__initialize_weights_": {"doc_hash": "bacb36770aac7f77b36db5b04a1c238e39de9264c49c88a01c285a159b8e8e8b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py_EfficientNet.set_swish_EfficientNet.set_swish.for_block_in_self__blocks.block_set_swish_memory_ef": {"doc_hash": "9846a2ac8894736486034fbc0cf93875b7c871c8b55192b1cd9c38eec5439129"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py_EfficientNet.forward_EfficientNet.forward.return.x": {"doc_hash": "b3574826f88a78a2de7daf71d70c8c366ccc9841fd8a5def9ca28f6a9830990e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py_EfficientNet._initialize_weights_EfficientNet._initialize_weights.for___m_in_self_named_mo.if_isinstance_m_nn_Conv.elif_isinstance_m_nn_Lin.m_bias_data_zero__": {"doc_hash": "da68408a73e47e11629096559261703c08a034aaba5075e48f0f6d58e58b551e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py_EfficientNetBN_EfficientNetBN.__init__.if_loadable_from_file_.else_.print_": {"doc_hash": "1b497abc8ab265d06bd8437fd9860f2850a2961a8051f34edd08383043ee669e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py_get_efficientnet_image_size_get_efficientnet_image_size.return.res": {"doc_hash": "14ef23eeedbc833812f964d1d904c9664352235d2703e31ec3afa3f24998cfbf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py_drop_connect_drop_connect.return.output": {"doc_hash": "3bd4fe9790e303b2d6bbc694d0e480e574bd6cc60a7817cad2eb2058da77aa72"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py__load_state_dict__load_state_dict.if_ret_unexpected_keys_.raise_ValueError_Missing": {"doc_hash": "673fcf743f1119df994e4020151253789a67253d9a7f5ddfb25ba74743edccf9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py__get_same_padding_conv_nd__get_same_padding_conv_nd.return._paddings_ret": {"doc_hash": "fc42ab96fe7c208ad61beefe7a8db3248e5381ed59be43c2015fa22e56267198"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py__make_same_padder__make_same_padder.if_sum_padding_0_.else_.return.nn_Identity_": {"doc_hash": "737d187a84d7c696de85250203650330235e38a54079b2c8f8f966665b6b5515"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py__round_filters__round_filters.return.int_new_filters_": {"doc_hash": "3ba129f5a8e745caee11954a83c239e761f03b27506db827666c9908c3874c83"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py__round_repeats__round_repeats.return.int_math_ceil_depth_coeff": {"doc_hash": "1253f81b05cea6c3d7ee7c900e95a4cef9c31c4ebd1e38f748dbd11a3cd7e133"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py__calculate_output_image_size__calculate_output_image_size.return._int_math_ceil_im_sz_st": {"doc_hash": "61e62a64d16148d72ac269cc92a5505166c77b273f852fc10ec2a7488e07273a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py__decode_block_list__decode_block_list.BlockArgs.se_ratio.None": {"doc_hash": "da4bf1383469febe939d60d9109a7c9f1d8e492e3ffdb41e92de8404233a3c37"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/efficientnet.py__decode_block_list._decode_block_string_": {"doc_hash": "5794ca0e10acea93acfbbcb59afed71ed0a1f2ecf588232747558e2281afcd0e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/fullyconnectednet.py_from_typing_import_Option__get_adn_layer.return.ADN_act_act_dropout_drop": {"doc_hash": "d7780b51c0a6009328ee58c792184f4f663cc55cc8021730830167042b6e9609"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/fullyconnectednet.py_FullyConnectedNet_FullyConnectedNet._get_layer.return.seq": {"doc_hash": "b748b6002b69c308341b851f6a5dab319dade1c21312dd3140d4367f2823983d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/fullyconnectednet.py_VarFullyConnectedNet_": {"doc_hash": "dfdf9b9897d5d8ef2011f80aa9372963952ba5e63a4d1acecc366f7e08eb6e88"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/highresnet.py_HighResBlock_HighResBlock.forward.return.x_conv_torch_as_tensor_": {"doc_hash": "1db19dda34c9aa4712e2fc5e6701884d736b2917dc407fb029a3059cd534d3cd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regunet.py_from_typing_import_List__RegUNet.build_layers.self_build_decode_layers_": {"doc_hash": "c85d8b1bcb6cb489e348c3fd3b3cb712158d6ba5937b551546f6183f3dadd168"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regunet.py_RegUNet.build_encode_layers_RegUNet.build_encode_layers.self.bottom_block.self_build_bottom_block_": {"doc_hash": "cb7cc4afb8f6f4739bd7f8e4862984e8abb175a9f2065cb0d0cd2e2dcae174ed"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regunet.py_RegUNet.build_conv_block_RegUNet.build_bottom_block.return.nn_Sequential_": {"doc_hash": "ee6e93e0040d8aa0b674bb547e7f89e565f9ec837d9ae1d2bb7b2ad2e7577c2c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regunet.py_RegUNet.build_decode_layers_RegUNet.build_decode_layers.self.output_block.self_build_output_block_": {"doc_hash": 
"243be43415066d57825b054c4494f5f1a29cb33ece2c196da7b814f0ba24788d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regunet.py_RegUNet.build_up_sampling_block_RegUNet.build_output_block.return.RegistrationExtractionBlo": {"doc_hash": "277b2963e00bec7b4e8d3f130fce0f0dbe88b78f646294423916dbe143dbbc29"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regunet.py_RegUNet.forward_RegUNet.forward.return.out": {"doc_hash": "f2047e0ae0e1e6de31003a9b7f37e016bf5b9f1202ad941fcb3e18be8ae28deb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regunet.py_AffineHead_AffineHead.get_reference_grid.return.grid_to_dtype_torch_float": {"doc_hash": "37ad7e7f872e846f7a7c701a26f7a7086acbdb71f2a1e8d448c80a2f8f04c620"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regunet.py_AffineHead.affine_transform_AffineHead.forward.return.out": {"doc_hash": "175511a2807ea45822ab890f021083f5be80f2d6db39dd64f74a3baaf0bcb974"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regunet.py_GlobalNet_GlobalNet.build_output_block.return.AffineHead_": {"doc_hash": "3aba1d29c73a23e7e2d032f05e2c33ea3d004fbe419ac0d8513cd2af7d45cafe"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regunet.py_AdditiveUpSampleBlock_AdditiveUpSampleBlock.forward.return.out": {"doc_hash": "0799ba2c8346a65c65c6582365cb0d16344bde0bb57e5014d2e163d72dd52295"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regunet.py_LocalNet_": {"doc_hash": "ca8c050a0cb80487757e12934f4239bdef08ff0b4224a9ddfef52f3dee423430"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/segresnet.py_from_typing_import_Option_SegResNet.__init__.if_dropout_prob_is_not_No.self.dropout.Dropout_Dropout_DROPOUT_": {"doc_hash": "d1de30a887fb28e78d496e4d40b50332618a3d6111b92605f7d304a79e808a19"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/segresnet.py_SegResNet._make_down_layers_SegResNet._make_down_layers.return.down_layers": {"doc_hash": "e1926914e6c49bb65cb8c346cad1626db2774d4f067f5743d4b6ebe3d7b7e4a1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/segresnet.py_SegResNet._make_up_layers_SegResNet._make_up_layers.return.up_layers_up_samples": {"doc_hash": "725b92ecac9694d4e0be68da07967cb6086798921580d996c06fd6034a52c4a8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/segresnet.py_SegResNet._make_final_conv_SegResNet.forward.return.x": {"doc_hash": "f6c68db891c7c351d6bb396eca368b15efedb291b8b4f3f1da3395b410c1a3e0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/segresnet.py_SegResNetVAE_SegResNetVAE._": {"doc_hash": "c10d75afafaf65f893603175075c3f983b4bc4a139daf8b4540d3c88b55f98aa"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/segresnet.py_SegResNetVAE.__init___SegResNetVAE.__init__.self.vae_conv_final.self__make_final_conv_in_": {"doc_hash": 
"0327fb15edde7f6cff561ef6aedb5d2841484f2da943ef46ae020f1d12b0ecb9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/segresnet.py_SegResNetVAE._prepare_vae_modules_SegResNetVAE._prepare_vae_modules.self.vae_fc_up_sample.nn_Sequential_": {"doc_hash": "ba65b3d1135c9dd1ca3962449858bc175c73f05e7bcfd0d0b9fabee59256125b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/segresnet.py_SegResNetVAE._get_vae_loss_SegResNetVAE._get_vae_loss.return.vae_loss": {"doc_hash": "96d34c78cebe07305691a0dafb7876ce7eb8f1c2321f2b28c1f277d75b7fd78f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/segresnet.py_SegResNetVAE.forward_": {"doc_hash": "c5c83bfb1e330164cf5fa63506a85e5aa2f35cbb0d7f5fc2b0fff4e26bc6f6bb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py_re_from_monai_networks_layer": {"doc_hash": "bf9c4e05ad9de323fac781e0c7eab4c58adf5399d8d889b4c2ed0fcd8e592713"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py_SENet_SENet._": {"doc_hash": "2aba69707f6b380ebab261f739032b12c30bfedca49ef55f1956b2b8bb2f3f8c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py_SENet.__init___SENet.__init__.for_m_in_self_modules_.if_isinstance_m_conv_typ.elif_isinstance_m_nn_Lin.nn_init_constant__torch_a": {"doc_hash": "5aa6c8ce3fd0fcf1337a2c8890fc6104d3a7bbbdb3978f7f27031bb09803ae63"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py_SENet._make_layer_SENet._make_layer.return.nn_Sequential_layers_": {"doc_hash": "ec0410f4ad8a881b64822658601759ebb4d6d76910602b46a811609359070c05"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py_SENet.features_SENet.forward.return.x": {"doc_hash": "edc0b671223ae08961ab19b4f05490701370a68d2240ab4748139ccba2feda68"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py__load_state_dict__load_state_dict.model_load_state_dict_mod": {"doc_hash": "15d5a57046edfeaaf5dd68d2270d6b3f8fa4c8a4837c483309e99ba7fb68b577"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py_SENet154_SENet154.__init__.if_pretrained_._load_state_dict_self_s": {"doc_hash": "e868f45860d6fc6f3dee1671d01b31e728a93d33858551127c60e52973925f58"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py_SEResNet50_SEResNet50.__init__.if_pretrained_._load_state_dict_self_s": {"doc_hash": "f8ee341f693fe7de8510605ce27a7a9cae47c6749d716d44edd5f8dc47805cd2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py_SEResNet101_SEResNet101.__init__.if_pretrained_._load_state_dict_self_s": {"doc_hash": "53303686c9d892f22cc47100e9ca9baff8a62b9546ceda80814b0928d1239e8c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py_SEResNet152_SEResNet152.__init__.if_pretrained_._load_state_dict_self_s": {"doc_hash": "b8fbb51e79a5a57245f90d1b651beca9f76dd3ad7093c86fda24e9f8f31bf4cb"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py_SEResNext50_SEResNext50.__init__.if_pretrained_._load_state_dict_self_s": {"doc_hash": "f2b05478aed105782d4159f1a20a4f4a33ab3b1c67f9b2cc8bc195849ec57dd2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py_SEResNext101_SEResNext101.__init__.if_pretrained_._load_state_dict_self_s": {"doc_hash": "97d52472a95428f168dbe443a6e51720d6c1f27ff2bc83902c25f13ef86d0318"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/senet.py_SEnet_": {"doc_hash": "dd81f74757eb9e3eeb6cbbf87d3f2ce18204594ee4ce7991694c27ccdd0074c1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/torchvision_fc.py_from_typing_import_Tuple__": {"doc_hash": "fff1f0e66ab8372de4d2ac720e462a90ebf2b9c72aa06a168f45bc6c1cb1d2ce"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/varautoencoder.py_from_typing_import_Option_": {"doc_hash": "f85f0b5d8beaf82f10a072baca8e941b95e5d51052a0d06a45cdeeaf465b4f7a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_slice_channels_predict_segmentation.return.logits_argmax_1_keepdim_": {"doc_hash": "1ff0cfbfe7817de216742fc6277015564be83b3ddc0f6557ae3c02af3a1d9d07"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_to_norm_affine_to_norm_affine.return.src_xform_affine_torc": {"doc_hash": "ab63889711332fc353306e135c2ba98f470b4dcd7daf253abc4b687aa1385789"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_pixelshuffle_pixelshuffle.return.x": {"doc_hash": "e937ce3a99fee223ebc1b6c5f3b04e87fe3b287baf4520e2f99634bd2207bbec"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_eval_mode_eval_mode.try_.finally_.for_n_in_training_.n_train_": {"doc_hash": "908471833007b507d22b929395564f4ead79a81ee3c0e172186ce041b063f4e8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_train_mode_": {"doc_hash": "d3beee49d8528e17e4a8836668d531765f17b71d1a2473741b68e17f068547d4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/__init__.py_LearningRateFinder_": {"doc_hash": "c03b7d50600fc653e75ecb999cf37f2f4dd2f90eed5ece2495e750edbe62cb40"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_warnings___all__._LearningRateFinder_": {"doc_hash": "0676b9415f7ca9e7648ef7f1dfcd4454d5e787eb07f3a3ff99791da0dcf9cfd5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_DataLoaderIter_DataLoaderIter.__next__.return.self_inputs_labels_from_b": {"doc_hash": "0913d44b0a0d9b21d62b29190f0adcb92edfd8611609b26c18fc6c7ecc6c92d3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_TrainDataLoaderIter_TrainDataLoaderIter.__next__.return.inputs_labels": {"doc_hash": "f7f0e8ac220f336177bb0a2c1affe4bac85349cc95858064271ee6bd342b156b"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_ValDataLoaderIter_default_label_extractor.return.out": {"doc_hash": "cf6f8057b72914cb4c77659a4ca9255f9c8bb11979ef508353f6303da09ce1fe"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_LearningRateFinder_LearningRateFinder._Learning_rate_range_te": {"doc_hash": "2c061cf3a02ba920ff2262f2e7f96c118e67b0e68f03be5e653d44a6c68b1ca8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_LearningRateFinder.__init___LearningRateFinder.reset.self_model_to_self_model_": {"doc_hash": "33a602c88c52e11f23d3a4c497c9503ad1b60adae8b1841f4a37eac2533b9f78"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_LearningRateFinder.range_test_LearningRateFinder.range_test.trange": {"doc_hash": "011d398e683adfc0d3cc2640cfb2005d887a437e625383a87e36a6b54971b4bc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_LearningRateFinder.range_test.if_self_verbose_and_has_t_LearningRateFinder.range_test.if_auto_reset_.self_reset_": {"doc_hash": "e2ebfbca49c4e2657d98feabd0a8d9a78b0ca9855ce22ab1fe8f4f3a2ef11e93"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_LearningRateFinder._set_learning_rate_LearningRateFinder._check_for_scheduler.for_param_group_in_self_o.if_initial_lr_in_param_.raise_RuntimeError_Optim": {"doc_hash": "100f4ce315d9e73bda5134d813b7011d01a8d66ac9693cb57caab1d5d1ad05e4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_LearningRateFinder._train_batch_LearningRateFinder._train_batch.return.total_loss": {"doc_hash": "9f21a26671ce2bd8101a0c22630ebd95718972e5076871c114634bb97a21e2e5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_LearningRateFinder._validate_LearningRateFinder._validate.return.running_loss_len_val_it": {"doc_hash": "121ce6b0d59c892a1cd6ad9668e25e247d06175cd674d64612d2a36f42d30417"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_LearningRateFinder.get_lrs_and_losses_LearningRateFinder.get_lrs_and_losses.return.lrs_losses": {"doc_hash": "43400276dd523732ccdd14efe1b48d45cdbddedfc996f2d8b91cfa5de2f24e30"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_LearningRateFinder.get_steepest_gradient_LearningRateFinder.get_steepest_gradient.try_.except_ValueError_.return.None_None": {"doc_hash": "75552e1187858db2fa7c906ee99db58b773fa8d82c47b1714721507cb68ee6fb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_finder.py_LearningRateFinder.plot_": {"doc_hash": "4da2b8ff167b698404b4cd0727db27bf102d8760e5cef9b1b22d42a920ac893a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_scheduler.py_from_torch_optim_import_O__LRSchedulerMONAI.__init__.super__LRSchedulerMONAI_": {"doc_hash": "5afb97ea294662b81e21f48e8c47852a40ee3fecdd4df8ae39b1ee6abb03ac48"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/lr_scheduler.py_LinearLR_": {"doc_hash": "b916ced1fed5f3137f6e6dfa2a6c3d99b9fb6a53b169b129bff2496021407d7f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/utils.py_from_typing_import_Callab_": {"doc_hash": "83d0bf2c8b86ae284a1c69aa24156221216de925b72e0977ad1599bee6b77285"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/__init__.py_FunctionSignature_AdjustContrast": {"doc_hash": "6c2e7cebd554dc6e5227a040cbb5930489a7bcefcbdaf3c70ef61fe83861db7b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/__init__.py_AdjustContrastd_Affine": {"doc_hash": "b97d3d91319f5b8f606a47a53b8ce7a609a0d21c897ccd3eb5fd45b92bab6341"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/__init__.py_Affined_AddChannel": {"doc_hash": "e4bd4a03d0c963e2b10c40170359c67e73722c97e6f16f66f85569749f201152"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/__init__.py_AddChanneld_": {"doc_hash": "195752a62caf21c23e4ba03cc68f38ddff7bd369795d6483c432d36147c3392e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/compose.py_warnings___all__._Compose_": {"doc_hash": "964dd3c1c256b39c48da673e6c1d3290dbf7c11d3e7dd623b429da4c05a5de12"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/compose.py_Compose.set_random_state_Compose.randomize.for__transform_in_self_tr.try_.except_TypeError_as_type_.warnings_warn_": {"doc_hash": "c8acfa2904c825a3b9d71b9cf5d35ae9fa40e1422f84f7f757a0740af816116b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/compose.py_Compose.flatten_Compose.flatten.return.Compose_new_transforms_": {"doc_hash": "60d2a17610d09c7aa18bbf2b2f8f78061953589ff4d4ebe830bf8be169a032c5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/compose.py_Compose.__len___": {"doc_hash": "2b2fefadd90d5e560c262744a5e134d98c6bcb216be1a231d0815cbc2029c836"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_from_typing_import_Any_C___all__._": {"doc_hash": "e2f88ba872535bc87c91a1584cdc4e6effec3458e0c651806a1d8edb6a723b85"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_SpatialPad_SpatialPad.__init__.self.mode.NumpyPadMode_mode_": {"doc_hash": "2b4af0e9811af4fc44c08d2a9bf4fceaa509dee5c7f9982157eef7dc8e17d530"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_SpatialPad._determine_data_pad_width_SpatialPad._determine_data_pad_width.return._0_max_self_spatial_siz": {"doc_hash": "9442f42d94c5bd396358f116602bd9a891dfc9b6c77fc88f1826632955d50181"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_SpatialPad.__call___SpatialPad.__call__.return.img": {"doc_hash": "1c655590a586917007fd53b63e6ecded6ceb3bd6411855de98c2bcc41fcd34f5"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_SpatialCrop_SpatialCrop.__call__.return.img_tuple_slices_": {"doc_hash": "db6f1b704665f1844409455bff2560075a7df2949387c81fd40c873062ffc663"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandSpatialCrop.randomize_RandSpatialCrop.__call__.return.cropper_img_": {"doc_hash": "d57c0f59defd024a4a7906c9b78d0873f14dc52f1e13b74b66812a789d28c833"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_CropForeground_CropForeground.__init__.self.return_coords.return_coords": {"doc_hash": "ef833c5759eeeb67d14955bd4888cdbe54a359c73061b44dc51213e212235f0b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_CropForeground.__call___CropForeground.__call__.return.cropped": {"doc_hash": "dd731ac1271629f5e76b4d67a8ceda825e0231500c8f3de3b9037aa7c9af3c42"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandWeightedCrop_RandWeightedCrop.randomize._using_only_the_first_ch": {"doc_hash": "65c134756819e58a1e9de5412d368d28e5931065bce62076c6ee718c86edbda1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandWeightedCrop.__call___RandWeightedCrop.__call__.return.results": {"doc_hash": "6797a46a2c3d6fff672e2444c28526a04eec377da2e904677c113407338fa3e0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_ResizeWithPadOrCrop.__call___ResizeWithPadOrCrop.__call__.return.self_padder_self_cropper_": {"doc_hash": "e31c496a6b36c9b25af8429a1aa712e87f9573e9e107c25d0245ba3757967b21"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_BoundingRect_": {"doc_hash": "0ce59a8536b4d97a9f8f7381c00b8b455498b345e000952350f15db637f739c2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/batch.py_from_copy_import_deepcopy_replace_element.return.batch": {"doc_hash": "09716086bceb5f14e6047031c2233c580a30a28367001b48415da5cf1a596aab"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/batch.py_PadListDataCollate_PadListDataCollate.__init__.self.mode.mode": {"doc_hash": "be2a884eaeba8f691459c5227e9a0c6673990568993bdf829c78526ba59094fd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/batch.py_PadListDataCollate.__call___PadListDataCollate.__call__.return.list_data_collate_batch_": {"doc_hash": "9f5197321d4f0ff6fe21d3b3658cf27dd35a1ff3a71e5c1f5994e8917340b240"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/batch.py_PadListDataCollate.inverse_": {"doc_hash": "347cec3768570422d8319ae0ae32a606ad0897a5bd42ac77d1d766a624e4de57"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_from_copy_import_deepcopy_NumpyPadModeSequence.Union_Sequence_Union_Nump": {"doc_hash": "a4eb0077ca8fd9899b2c639a34f2b65397a6ec0bc93c873d609bc124ff437612"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_SpatialPadd.inverse_SpatialPadd.inverse.return.d": {"doc_hash": "ad90acecf8230d35b7eb0f324eca875c5988e1741382dc9b68da34e3397d404d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_BorderPadd.inverse_BorderPadd.inverse.return.d": {"doc_hash": "8a7775e781bfb86df039c78b77ee124df4057832fe958657a60361e086793348"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_DivisiblePadd.inverse_DivisiblePadd.inverse.return.d": {"doc_hash": "387fda87dedcad6c88e4349e67e0396df18a30255ba2b3498e122b003c70b66d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_SpatialCropd.inverse_SpatialCropd.inverse.return.d": {"doc_hash": "a57ab0bbd80365fea8dad17c160dabe05784c8de251439fb7d657b89d740631c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_CenterSpatialCropd.inverse_CenterSpatialCropd.inverse.return.d": {"doc_hash": "9233ac821183a151648f0a4ced27c3c6c60eae3b5d0622c6143f4f75110e9058"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandSpatialCropd.inverse_RandSpatialCropd.inverse.return.d": {"doc_hash": "ca51adf52e98afb393cf7462e2c88045f9426113ac0e2e20fdb31c3390efd3bb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandSpatialCropSamplesd_RandSpatialCropSamplesd.randomize.pass": {"doc_hash": "e0a93bfbfe9a4ecccb626a6dcad2bd36895cfd63ebf8ed58da9865cbbd5b9a17"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandSpatialCropSamplesd.__call___RandSpatialCropSamplesd.__call__.return.ret": {"doc_hash": "4e176e72f39e1a850cd0fa44fff6bc4489314fb45dd7e9868cf23ad90f3953fc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_CropForegroundd_CropForegroundd.__init__.self.end_coord_key.end_coord_key": {"doc_hash": "60af94d48023e4287ff48f31ed46262d27a5c8846562f12cb51aa7740095e3c9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_CropForegroundd.__call___CropForegroundd.__call__.return.d": {"doc_hash": "fef44864510d90324acd7013c62c7bd153c720ba9504c7151208f8816af73d2b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_CropForegroundd.inverse_CropForegroundd.inverse.return.d": {"doc_hash": "12183eb8119a8d2cf32d5a7f4e56fcbbc5f364c21d844049982f2a8a1c885803"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandWeightedCropd_RandWeightedCropd.randomize.self.centers.weighted_patch_samples_": {"doc_hash": "55376d448bd34443c6cc041b20841375676b900bac87a75418dae35f8216e6f8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandWeightedCropd.__call___RandWeightedCropd.__call__.return.results": {"doc_hash": 
"14ce3905656ddabdad674212d011ae00657af68783e3105d816c57108a7b8155"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_ResizeWithPadOrCropd_ResizeWithPadOrCropd.__init__.self.padcropper.ResizeWithPadOrCrop_spati": {"doc_hash": "a8806b0fd24b4457db3273229bbf62e5c1877d2311d74f2dff6ff0eb96a28b7b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_ResizeWithPadOrCropd.__call___ResizeWithPadOrCropd.__call__.return.d": {"doc_hash": "2b686f8567755dab182838611d25c02df3a8a9a34aea48c48f319b38bccb2638"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_ResizeWithPadOrCropd.inverse_ResizeWithPadOrCropd.inverse.return.d": {"doc_hash": "d46bb1bb20677014fced36ea5db3c778f7256ef2e59dcc9b5abeb0bc2097cde8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_BoundingRectd_BoundingRectd.__init__.self.bbox_key_postfix.bbox_key_postfix": {"doc_hash": "0313af1a19526bfc26402722fa22413aeba69c73dcf6fa44f544d214787062b6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_BoundingRectd.__call___BoundingRectd.__call__.return.d": {"doc_hash": "f94977f633d381c90b202d7fd381b10ea9a6696e86e6f63b43f6c12b2cf70ee4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_from_collections_abc_impo___all__._": {"doc_hash": "c40d48120103e64e5f4bba2fd041fb31f589b49e2cbc8911bf69d103367426ef"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandGaussianNoise_ShiftIntensity.__call__.return.np_asarray_img_self_of": {"doc_hash": "3904c70d6bbd7f6e0756e424d24d3a0f56b330d9ccf7dfa3400d5b90c311c969"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandShiftIntensity_RandShiftIntensity.__call__.return.shifter_img_": {"doc_hash": "4eed2f8a7633c7bf4c497776fc5c908ddba751c5b93d132e8eefcdbb6f3d7722"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_StdShiftIntensity_StdShiftIntensity.__call__.return.img": {"doc_hash": "a7228783831f1acb3633ade0a306df54be065d28bfeae2816d319b58a187fe4c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandStdShiftIntensity_RandStdShiftIntensity.__call__.return.shifter_img_": {"doc_hash": "b1ba7faf35eaf7d82877c0dd80c603fa196dd59b1a48cb3856d9558ec6e7a491"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensity.__call___ScaleIntensity.__call__.raise_ValueError_Incompa": {"doc_hash": "9d067ce8793927e7edca397dfa72661a62e34e240a25b66f690ba5acf005811d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandBiasField_RandBiasField.__init__.self.dtype.dtype": {"doc_hash": "adbc74282f28177e028c8090ebbe26db7409688c9b6e7d1f33bf3f4c63e164fb"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandBiasField._generate_random_field_RandBiasField._generate_random_field.return.field": {"doc_hash": "734cce74c536d8ff4d614144c2b9c219122d81f4209fb4ce63b2ee718aec6237"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandBiasField.randomize_RandBiasField.__call__.return._img__bias_fields_asty": {"doc_hash": "ab0eb9de51397d2c9abb5f507847e32fa8b5b194bda9ecf53a68e386a338d5e4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_NormalizeIntensity_NormalizeIntensity.__init__.self.dtype.dtype": {"doc_hash": "92264077dfe17a918bd4ac4ce71e5e4aecdc8a25fd85a3f2d7acd754e303359b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_NormalizeIntensity.__call___NormalizeIntensity.__call__.return.img_astype_self_dtype_": {"doc_hash": "61890df37e955d37ebf43440916ea7287f4c46735297fe407c94dac8eac2c7e8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ThresholdIntensity_ThresholdIntensity.__call__.return.np_asarray_": {"doc_hash": "daab3650b5c2dcb251d883d36a21dd371a53be0ccd39d349cc5dac03090cab7d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_MaskIntensity.__call___MaskIntensity.__call__.return.np_asarray_img_mask_dat": {"doc_hash": "0d54934e90462b88bdc8ad514eff50934ba34c5276537221af3c6ccbed0b8d14"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_SavitzkyGolaySmooth_SavitzkyGolaySmooth.__init__.self.mode.mode": {"doc_hash": "4a32caa37b760eccc2e4e9070f68e8af6e037f906f9e5a84721cd48d60ace9e5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_SavitzkyGolaySmooth.__call___SavitzkyGolaySmooth.__call__.return.savgol_filter_input_data_": {"doc_hash": "6c8eeac4a95d28464a424abd5fdd72aa9d2db9baeaa2458cce1a9cbc1d676c14"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_DetectEnvelope_DetectEnvelope.__init__.self.n.n": {"doc_hash": "70283512bd53eb7bb224e43f92a93f2a6eb562c24239b90a2f497d547bf04377"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_DetectEnvelope.__call___DetectEnvelope.__call__.return.np_abs_hilbert_transform_": {"doc_hash": "24f0ed84546ce0e26e7737eb9e6f3c6a03e81693386b6df7dd9d108c6e6848fb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandGaussianSharpen_RandGaussianSharpen.__init__.self.approx.approx": {"doc_hash": "b16ed2735344548f9a1335b71c26d61818fe1ccc0d17424420237d91770daff2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandHistogramShift_RandHistogramShift.__init__.if_isinstance_num_control.else_.self.num_control_points._min_num_control_points_": {"doc_hash": "42b4d01634c52eb712292f8d949ac6e8ba7ee7f474d94e6a500833442ad6288c"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_from_collections_abc_impo___all__._": {"doc_hash": "93e0bbf57792625daddd5dc9eb6022f0121feb37ab5ea119d70c91bf41dbc305"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianNoised_RandGaussianNoised.randomize.for_m_in_self_mean_.self__noise_append_self_R": {"doc_hash": "5e04f12ae6f2809603571fbdb37ba3b5da82c083b88871fdb0989769e53f2ae6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianNoised.__call___RandGaussianNoised.__call__.return.d": {"doc_hash": "d733d41a4aaa7cc7c28d9f1523173573c79843ec4839e5970abc40c4b09a49e3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_StdShiftIntensityd_StdShiftIntensityd.__call__.return.d": {"doc_hash": "d280fe07f5eb463b358ca0688a2cdcc01259f8bc135343c3109e86b94a4a0cfa"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandStdShiftIntensityd_RandStdShiftIntensityd.__call__.return.d": {"doc_hash": "add5cb9a18a4490a47cfc13205b7feb0d0b03ad0e080a46cb874371c5d0fbde0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandBiasFieldd_RandBiasFieldd.__call__.return.d": {"doc_hash": "be69ff41be00dcb29a84febb8cf7bd276c3b80d1fc95c843a1b85fd64cb41e18"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianSharpend_RandGaussianSharpend.__init__.self.approx.approx": {"doc_hash": "072a71588691ee0aef6be013b235c6c639d73dc2a5c2ddfdc75a7b2f01ad909b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandHistogramShiftd_RandHistogramShiftd.__init__.if_isinstance_num_control.else_.self.num_control_points._min_num_control_points_": {"doc_hash": "4f5fc31346349b7cb8b2c63add4de546b002429e3bec1c5525771828d42f5b9b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/inverse.py_from_typing_import_Dict__InvertibleTransform._Classes_for_invertible": {"doc_hash": "4d2741b987f8f6b3b193932bccdd40ac6edea818051db6ba9d32a426af57e155"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/inverse.py_InvertibleTransform.push_transform_InvertibleTransform.push_transform.data_key_transform_appen": {"doc_hash": "96365757e6d908a1cc49de7a9b7cda453a6c089d014c2d9037bf25e222900cdc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/inverse.py_InvertibleTransform.check_transforms_match_InvertibleTransform.check_transforms_match.raise_RuntimeError_Shoul": {"doc_hash": "2621f9324f2cd9f7c8ee9b2aa596b8630a2fbe1b874e2110c72ca30beb6c8368"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/inverse.py_InvertibleTransform.get_most_recent_transform_": {"doc_hash": "0d8fba91306b1ab3ec7e4c8becdd8092cc7c4e8550dae830b116f6038e733deb"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/array.py_from_typing_import_Dict____all__._LoadImage_SaveImage_": {"doc_hash": "9e9a7fc0b2b00cb3205f3c428ffa128e4bcf1debc4ad3a8ffad45f0e601d222e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/array.py_switch_endianness_switch_endianness.return.data": {"doc_hash": "76d1f550ec9ddab136bf3aa951ac145a6c1a962cb56e54c57a6bb2ad0ca55eaf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/array.py_LoadImage_LoadImage.__init__.self.dtype.dtype": {"doc_hash": "c0bd663f3650eeb7939a27922e0b785fd373d0fdc8cfd96d331ad27b2f88e6f7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/array.py_LoadImage.register_LoadImage.register.return.self_readers": {"doc_hash": "b8f8fc6cb76b4b062b389007576fb523096c87cdeb500b35df34f53a852eab98"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/array.py_SaveImage_SaveImage._": {"doc_hash": "0854a0e5d0dedfef06f4e286d3a6a6219666314b8531812ec63c87acf5c16a4a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/array.py_SaveImage.__init___": {"doc_hash": "5889b0d258b1698a9784f216c801985b357ed2811f8c9da4a0cb4cffda8e9f7d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/dictionary.py_from_typing_import_Option___all__._": {"doc_hash": "93b9d98e6ff4bf9eca5740105b596e4a1014adc393de17e0626cbaa874dfa2d0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/dictionary.py_LoadImaged_LoadImaged.register.self__loader_register_rea": {"doc_hash": "a5b2a349c0bf10fbd1784a38db8a99b2eabae21ba26071e6554cc0b93def1cc3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/dictionary.py_SaveImaged_SaveImaged._": {"doc_hash": "cbf571a9cecaacd3ea4bc903fba0205ba2c7071e1d32e258e23b8be174a766d2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/dictionary.py_SaveImaged.__init___": {"doc_hash": "e1b670b911bf5fe7d671d19b687c9757c4b966d2cbc07a6429d68ab45c1a5983"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_warnings___all__._": {"doc_hash": "2db6c7da2cb0ae711338346dd621566d536bb5b4a45520c844084f828b6c80eb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_VoteEnsemble.__call___VoteEnsemble.__call__.return.torch_round_img__": {"doc_hash": "f14e0944b8ed299bf29531c7d9e9ddafd9ae0cee45385069b334bd923d167973"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_ProbNMS_ProbNMS.__init__.self.box_upper_bd.self_box_size_self_box_": {"doc_hash": "7911a37f93c367ce117a4e18568276619f4f077a668b9bffee684b4c04c06621"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_ProbNMS.__call___": {"doc_hash": "765428c15baa65ddc4d63f5f719cc78d2510ceea767516ff6280b731a6734afd"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_from_typing_import_Callab___all__._": {"doc_hash": "618e9ef86cd55ed4fac5370824b751b1cdb334921fc529391717dc4a31586a21"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_AsDiscreted_AsDiscreted.__init__.self.converter.AsDiscrete_": {"doc_hash": "06323d01e39c2f5e0f6abcb2210534397e937c2ed0035669e6bdb601050f9695"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_AsDiscreted.__call___AsDiscreted.__call__.return.d": {"doc_hash": "c3f57b87a6df1f0da28590b4d9fac713dd7135053d9f2286d15bac87a1882c78"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_VoteEnsembled_VoteEnsembled.__init__.super___init___keys_en": {"doc_hash": "be9901d1264826902a44b5aac5a65468b1aca92e5e446e81dbef90c8da2f2948"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_Decollated_Decollated.__call__.return.monai_data_decollate_batc": {"doc_hash": "5d381bb584d529788bf4070870b89dffca6e4f3d076ebc03effb54322e3d09f2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_ProbNMSd_ProbNMSd.__call__.return.d": {"doc_hash": "6542602d1f35cdd28ca182faaf3d1a73244f6735199a08b12642a640cdf0342b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_ActivationsD_": {"doc_hash": "be5f826a5101ca67949a1618d3a5f7953b5d47c8514e278bb6077c94dc9ab4c7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_warnings_RandRange.Optional_Union_Sequence_U": {"doc_hash": "0cedaac52bfc449159a13cf7006ea040808259925fce4a54c9f85bd39f9f545d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Spacing.__call___Spacing.__call__._no_resampling_if_it_s_i": {"doc_hash": "736e0197428e91aa2efb9d82642ec906c06793fd00ac7d62d67b0a9222c1c96c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Spacing.__call__.if_np_allclose_transform__Spacing.__call__.return.output_data_affine_new_": {"doc_hash": "7f47602cf792be54932a49c5ce0204d1f6a18518524d40a3cf8ee0b99d288f5f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Flip_Flip.__call__.return.result_astype_img_dtype_": {"doc_hash": "21cddc8ac4d089fbd2e3fdd95d4580b57e829c7d133fba2290939d0809c1600c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Resize.__call___Resize.__call__.return.np_asarray_resized_": {"doc_hash": "106bc49855792cecd608b1e310efa7491ed137bf6000ba64a57644322fec0f33"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rotate_Rotate.__init__.self.rotation_matrix.None": {"doc_hash": "4fbcf3ab53d967517b338eab211491480ed1f5495e11052199c53a76cf79a1a3"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rotate.__call___Rotate.get_rotation_matrix.return.self_rotation_matrix": {"doc_hash": "7d1c2291618267c2a4e76f179f3864c5dd8d8e6815c89f2c53948aa9a0811623"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rotate90_Rotate90.__call__.return.result_astype_img_dtype_": {"doc_hash": "89722ab1e1f414d7f3e7ef71acef608feee5addbbfffb320366a973f79e7885b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandRotate.randomize_RandRotate.__call__.return.np_array_rotator_img_": {"doc_hash": "dcbdefc1bbc37a3e53cc5d06c72354b266f5d535e856c1ac3aa1a46650d38401"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandAxisFlip_RandAxisFlip.__call__.return.flipper_img_": {"doc_hash": "997445161c3896ecac7024af8f32c985ed2a83bfde5fbd41fa8d6e438bb53d2a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandZoom_RandZoom.randomize.self._zoom._self_R_uniform_l_h_for": {"doc_hash": "4504276b22a11a5aee5fa11f2be26694b8655c7af834209292773bac836304e0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandZoom.__call___RandZoom.__call__.return.np_asarray_": {"doc_hash": "2f5094fd405a9b61abe32fedd476a8fdf9a8f70fe070a087cace105f35f75a7e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_AffineGrid_AffineGrid.__init__.self.affine.affine": {"doc_hash": "a80e08fcb1306c3ecb8f574cb3bae67900fe939faf477171c8541bce0b2df75a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_AffineGrid.__call___AffineGrid.__call__.return.grid_if_self_as_tensor_ou": {"doc_hash": "4b579814ce99853a9cc339cce5ca6d863e2f19839ff446371ad42946a73b798c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandAffineGrid_RandAffineGrid.__init__.self.affine.None": {"doc_hash": "e735382783aca041aa4d73ce68873c06ca9ac307e7cc3cb4ffbada2be9789696"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandAffineGrid._get_rand_param_RandAffineGrid.randomize.self.scale_params.self__get_rand_param_self": {"doc_hash": "643321abb123584ed1d5022705bd6b963e5d823d87a0dca3d5678746e4b76f99"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandAffineGrid.__call___RandAffineGrid.get_transformation_matrix.return.self_affine": {"doc_hash": "6bb41800f0e64aba7561771d1e7b4d864b00d4d05f7c1c0373eef8ffef26908a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Resample.__call___Resample.__call__.return.np_asarray_out_cpu_nump": {"doc_hash": "7d157b3f33ccf450436491105199ddcba0646dd4c9ef7abeb5fbd2de61dede2e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Affine.__call___Affine.__call__.return._": {"doc_hash": 
"12f038ea24bcc6de285cb38c4e255dbc3c8a1215ded2ce99ffc18f860772e107"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandAffine_RandAffine.__init__.self.padding_mode.GridSamplePadMode_padding": {"doc_hash": "3453d1ed804d9d2e5c1f147005520f0945e9bce1673bfeffd043b95a8f2e8667"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand2DElastic_Rand2DElastic.__init__.self.padding_mode.GridSamplePadMode_padding": {"doc_hash": "2425c5239d90b01fef0ceab2eb1c846bf5b319c973a3a7797951f2cac64682f0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_from_copy_import_deepcopy_NumpyPadModeSequence.Union_Sequence_Union_Nump": {"doc_hash": "cccd77f0bb6c31b6ab0bf42e3beac6897e4e658d49cbeeae72d5264e07a64f74"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Spacingd.inverse_Spacingd.inverse.return.d": {"doc_hash": "37e5d38ca8f523c27d03b1c95104008ad19ab196b4077c1f20e790542cc43723"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Orientationd.inverse_Orientationd.inverse.return.d": {"doc_hash": "02db77c85d90aea816e863dcadf01e735a851936dd42a6ca636dd5bb743b8602"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rotate90d.inverse_Rotate90d.inverse.return.d": {"doc_hash": "debac3ae183da65d462d245aa73497001e38a359ab901655e28ed2701febcee8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotate90d.inverse_RandRotate90d.inverse.return.d": {"doc_hash": "08356fd10ad013655eb698b2900ae30e5333e05850fc8e98cf3a50529e92d94e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Resized_Resized.__init__.self.resizer.Resize_spatial_size_spati": {"doc_hash": "f17fe82955ae6d053f8054d5978e60446ebf718d6795087808bdd767a4440e41"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Resized.__call___Resized.__call__.return.d": {"doc_hash": "56bbb603489dc666eaed60e189f68c5f43855852dabe661916dde3740fff771a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Resized.inverse_Resized.inverse.return.d": {"doc_hash": "2a805f7941a6e8e5746887fdcc09ab604806d8539cb0d2b3a578b53e24189054"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Affined_Affined.__init__.self.padding_mode.ensure_tuple_rep_padding_": {"doc_hash": "b0f0b91effdb63b7f19e9a420135b17aeb873bac73a2626faa8b66f44ce6ae95"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Affined.__call___Affined.__call__.return.d": {"doc_hash": "35896088433307e4cf4a1f634c4b62b4029e31c6fb8db39d41bc2af04913f09f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Affined.inverse_Affined.inverse.return.d": {"doc_hash": 
"fe35b39bbb1258bfd5d1b8941a737eef5aa296c260c4b0781e07bb1ecffa2ff6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandAffined.inverse_RandAffined.inverse.return.d": {"doc_hash": "c5887c36ceb564471126a19fde727e4773a22a641a00c0ac0be23b87c5ca8b0c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Flipd.inverse_Flipd.inverse.return.d": {"doc_hash": "2081ff3081374503945fdc26cb628aadf39f77c3880b8f281a26623e2181e2fc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandFlipd.inverse_RandFlipd.inverse.return.d": {"doc_hash": "d4ef39663b9f0568ad333efbc8ccf673aadd42d1bf7678395a1b17f63fe4be5e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandAxisFlipd_RandAxisFlipd.__call__.return.d": {"doc_hash": "4e8502b17b6e9c6553f803d3808f823fda5a305de68b10def6344fc9d96e28f1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandAxisFlipd.inverse_RandAxisFlipd.inverse.return.d": {"doc_hash": "bcf21868999069ee172573d0896da4df820ae4d84b9a398530c9b651b6bd6223"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rotated_Rotated.__init__.self.dtype.ensure_tuple_rep_dtype_l": {"doc_hash": "266a3c4b096c8a41fa732128adbea6a0e729743ad53d64d0f1e71d029bd5e2a1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rotated.__call___Rotated.__call__.return.d": {"doc_hash": "2ef0ab604159237608275780c989dc4dd362feacb3d35f3cdeb8bc9f743cffe2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rotated.inverse_Rotated.inverse.return.d": {"doc_hash": "82aa7b85bb954bcbba81c0245b103c82420c6db35f4c89f4b4d0d2d1707b1337"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotated.inverse_RandRotated.inverse.return.d": {"doc_hash": "2af5579eea3e88f10ca75a9e7c76001738be8da9e04b4376357437f1db76049c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Zoomd_Zoomd.__init__.self.zoomer.Zoom_zoom_zoom_keep_size": {"doc_hash": "2faac55ac416f99ef0e610ab5d3407203f37529d0b2627960f1f6bbc7105ecc3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Zoomd.__call___Zoomd.__call__.return.d": {"doc_hash": "5e66b1fb51a4bb3d3c037e8e4d016220d2cfe139c1e282128d778f4c9a70cf93"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Zoomd.inverse_Zoomd.inverse.return.d": {"doc_hash": "ab1592bc225f96d048a6ed8a41586dc7fa83e9b1f0792a268ddef3127f2c13c4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandZoomd_RandZoomd._": {"doc_hash": "d4c8a0c83540322be8d39062ff895d50b6d367981fbfa946b251bbed8de5baaf"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandZoomd.__init___RandZoomd.randomize.self._zoom._self_R_uniform_l_h_for": {"doc_hash": "9bf9d0f6fbbce3ee53f40ed662c54d7c76f4430d696beeda00c64c1ded765a77"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandZoomd.__call___RandZoomd.__call__.return.d": {"doc_hash": "3626876dce7963970f15e6a9023ee893f6807b0d5278262ef7e5b51896e578fe"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandZoomd.inverse_RandZoomd.inverse.return.d": {"doc_hash": "7d2f5ba87b8441df9d0109114b1dc65cf2624a774b3d89c9b38dc80bbcf0a5c8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/transform.py_from_abc_import_ABC_abst_apply_transform.try_.except_Exception_as_e_.raise_RuntimeError_f_appl": {"doc_hash": "a662ea7e8b1d9f411208e39d33c41c2697f4527a0ccfcb971d647cf7a5258347"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/transform.py_Randomizable_Randomizable.set_random_state.return.self": {"doc_hash": "40e3d90a8b0167b71052f7ad51f7204c60f8b48aea096e8c440dfec0d9c3cab6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/transform.py_Randomizable.randomize_Randomizable.randomize.raise_NotImplementedError": {"doc_hash": "318c20147abb2e7731a75cc928eb1a12318ff501e630c21e171596d58eaef66a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/transform.py_Transform_Transform._": {"doc_hash": "4861f5befdf074550abca2c81eacce14ec02351a777d5c514495200ef26115a7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/transform.py_Transform.__call___Transform.__call__.raise_NotImplementedError": {"doc_hash": "b4b56a70d93e9a45a60c1e2fe3ba771ba28954224b71b3f78ed169f9e3cb8461"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/transform.py_RandomizableTransform_RandomizableTransform.__init__.self.prob.min_max_prob_0_0_1_0_": {"doc_hash": "5e9ef281ee1ac270b14fd890c8895e8f0d3ac02b6bd75e9a88ead32a166d7f25"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/transform.py_RandomizableTransform.randomize_RandomizableTransform.randomize.self._do_transform.self_R_rand_self_prob": {"doc_hash": "0a1fd5b6f876067d84f370f51f7bd310fc7052f8b240eaf99ac26fdf80379d62"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/transform.py_MapTransform_MapTransform.__init__.for_key_in_self_keys_.if_not_isinstance_key_Ha.raise_TypeError_f_keys_mu": {"doc_hash": "6c236b0d29af7f9556cfc017eecad896301071ae2c6579dcc2e1c729c4ad5135"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/transform.py_MapTransform.__call___MapTransform.__call__.raise_NotImplementedError": {"doc_hash": "e412292d99b98aa3f7bcfa762abd16cddc6f7f84b5a674364b46308b398f9af4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/transform.py_MapTransform.key_iterator_": {"doc_hash": 
"a88b924c6b618e183dbf143eecd35ec082fa31f8f75498abf65939d966e0616b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_EnsureChannelFirst_EnsureChannelFirst.__call__.return.AsChannelFirst_channel_di": {"doc_hash": "7032144aea1cbf784fc95585be1b3dc220e423bc293fcb7f512f90c6ef0d0f48"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_RemoveRepeatedChannel_RemoveRepeatedChannel.__call__.return.np_array_img_self_repe": {"doc_hash": "8236654a77525febeee244b51feb9cec51f7c50a06d927ab3a4bdf20b1f14a0d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_SplitChannel_SplitChannel.__init__.self.channel_dim.channel_dim": {"doc_hash": "87fe3b0a6e1e88c198e178ff9a364f57309305af96817f85c5cb4f5b22a43db3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_SplitChannel.__call___SplitChannel.__call__.return.outputs": {"doc_hash": "3de3eab1001e1ba1ca7bc3b40f605478dd5c01227a05920ecdbab99f91b7db44"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_CastToType_CastToType.__call__.raise_TypeError_f_img_mus": {"doc_hash": "d77153478566bcfaa491e1298c7bc713b05db0be70f78bcf0aa35b02817dd2a9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_ToTensor_Transpose.__call__._type_ignore": {"doc_hash": "9834c7a928cfb467bb2d487fe9d2bc90366e6ba652fb6ed5c11d50888dce0326"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_SqueezeDim_SqueezeDim.__call__._type_ignore": {"doc_hash": "5a475a625c5031075292a1be362b110ebbd0445c486067e273b1def15479ac3f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_Lambda.__call___Lambda.__call__.raise_ValueError_Incompa": {"doc_hash": "6c68c242a60117bef0d6daeae26e05d3f185ddbe99637f090b247a46d202cad2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_FgBgToIndices.__call___FgBgToIndices.__call__.return.fg_indices_bg_indices": {"doc_hash": "f58ea66695ee4376d2d3063a6e85a0c88e017b9534db72f80e9f3ced3ae4b782"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_ConvertToMultiChannelBasedOnBratsClasses_ConvertToMultiChannelBasedOnBratsClasses.__call__.return.np_stack_result_axis_0_": {"doc_hash": "5d16fbe6b20538c274c624401adfb1ed734727d4c89d23ff9363cccde0897cb2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_AddExtremePointsChannel_AddExtremePointsChannel.randomize.self._points.get_extreme_points_label_": {"doc_hash": "4004d77621751a3d1afac62ed26d017414116590f172b76c526a6299a1352abb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_AddExtremePointsChannel.__call___AddExtremePointsChannel.__call__.return.np_concatenate_img_poin": {"doc_hash": "9ae927f69017ce43945cc7b25f5f617909ae92967888f706d7fc080c647deca4"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_TorchVision_TorchVision.__call__.return.self_trans_img_": {"doc_hash": "0b54c05154879d8ff18d377cbb82c9226cdd79fe5d5b88646c346aab0884ac89"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_MapLabelValue_MapLabelValue.__init__.self.dtype.dtype": {"doc_hash": "30ecb495ddd2e40d3e3d3ade7efd22484ff8a9205a74dee9c3a617efa9b1533c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_MapLabelValue.__call___": {"doc_hash": "4cfc099be724664ad8b3d948e2dbf0ac0b5ad6155f14b0d0e2e4056f693c5679"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py___all_____all__._": {"doc_hash": "d67bdba45f19eeddfe63446b0bda1bddbbfc10734c95ad36d015c3307a39f7fb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_EnsureChannelFirstd_EnsureChannelFirstd.__call__.return.d": {"doc_hash": "86b3f353c0f0a5dbb2e29659037f3b3ebd4d3fd7a8131e70da53bd96f84783bc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_RemoveRepeatedChanneld_RemoveRepeatedChanneld.__call__.return.d": {"doc_hash": "4f6eaf07a118173a59f833df9753226fb20abb26820c2bea8a769502d2d4bf22"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_SplitChanneld_SplitChanneld.__init__.self.splitter.SplitChannel_channel_dim_": {"doc_hash": "d1f6c204dd9fbc2ab2295192e7e78ae61b72b1fe228a016f6bf95d6e28231103"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_SplitChanneld.__call___SplitChanneld.__call__.return.d": {"doc_hash": "e83e1f4b1391c63f1477d337f352006c7f7affdaa44f8fb3a9d152aa542f97cd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_ToTensord_ToTensord.inverse.return.d": {"doc_hash": "cc111329bdd5ad38a3822bce5c4563b6b3066b991232cc820e96b71233306551"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_ToPILd_ToPILd.__call__.return.d": {"doc_hash": "3a1b2e21233c596da9339dda7d89fcb763551af98a65f83a9dfd803f2909cd94"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_DeleteItemsd_SelectItemsd.__call__.return.result": {"doc_hash": "f734d39bca93174d63b2208b4ccd23609556e5ee8386d6039be127ff0438bdcb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_DataStatsd_DataStatsd.__init__.self.printer.DataStats_logger_handler_": {"doc_hash": "fec56ef262b87dbf84f16d564c9c87bd77a0085c5973962745c124e8471ccf9f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_DataStatsd.__call___DataStatsd.__call__.return.d": {"doc_hash": "047319f78e2c91fb65e520dd8a4ebcd5ad445663d431a06ab809417555be41c3"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_CopyItemsd_CopyItemsd.__init__.self.names.names": {"doc_hash": "8270f1c48321def92a551386a255bb463627dab643213184874144d884dcb783"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_CopyItemsd.__call___CopyItemsd.__call__.return.d": {"doc_hash": "b2ba89fd34bec530e9f51671ece3283512defe1ab7a604572cb093a99c793be2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_RandLambdad_RandLambdad.randomize.pass": {"doc_hash": "8a663c713d6978d483a9043b4fc7bcdd9efe2c06dc1522224d4ebf25ab48459f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_ConvertToMultiChannelBasedOnBratsClassesd_ConvertToMultiChannelBasedOnBratsClassesd.__call__.return.d": {"doc_hash": "fa8ab3dbb1a95d685f346cd34441d9e2ca5ef7d509bc261035755a6baa93bf7a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_AddExtremePointsChanneld_AddExtremePointsChanneld.randomize.self.points.get_extreme_points_label_": {"doc_hash": "167f804a67e9c767158a5e8727067401cee76940e09a7d8fb2855933fd48cb91"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_AddExtremePointsChanneld.__call___AddExtremePointsChanneld.__call__.return.d": {"doc_hash": "171cd15a1beffbc15821ca04ab48c3920b2162167b586b7b2071e9b8451ebda3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_TorchVisiond_TorchVisiond.__call__.return.d": {"doc_hash": "dbe70c9fd2e104afd9cca7005be55a7adaabafec7d2eba9be4f77c9a6792b78e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_RandTorchVisiond_RandTorchVisiond.__call__.return.d": {"doc_hash": "b8527c50ef379651f5dca7e4470334ef5e0dff3374af9824bf5530d9937d2dff"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_MapLabelValued_MapLabelValued.__call__.return.d": {"doc_hash": "9cf4b91b8c07538ee4b1e301d091a47e71bbe8f34d9c2d56e7d30f38c131070d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_itertools_zero_margins.return.not_np_any_img_margin": {"doc_hash": "af227463988b956e7461b118e94a6c61c56e18b37e6d6453ed494a607772ab95"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_rescale_instance_array_rescale_array_int_max.return.np_asarray_rescale_array_": {"doc_hash": "9a22fc53440e48ae62a99198ab939bfa6fed63a435c203214decab9a53d7fd40"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_resize_center_resize_center.return.img_srcslices_": {"doc_hash": "bad48975fc30b83833e6cb7af7a5b28187bcd3a0de654a2c9a413f0ccd55bf32"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_weighted_patch_samples_weighted_patch_samples.return._np_unravel_index_i_v_si": {"doc_hash": "853bd6347806c5d1e77055f0f485b0c356b946e2534525ce55c8e8a7f97eb021"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_scale_create_translate.return.np_asarray_affine_": {"doc_hash": "f09041ac3e071b2aea123866772aa562b2973921d93e6bfc9c645207afa1dbd6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_get_largest_connected_component_mask_get_largest_connected_component_mask.return.torch_as_tensor_largest_c": {"doc_hash": "e70813750298044d0d06f2ab4ae7688486bd550bcbf84d95733ecbc71503ffa6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_get_extreme_points_get_extreme_points.if_np_size_indices_0_.raise_ValueError_get_ext": {"doc_hash": "6431523beb8bd5de8f237be674e9ccc1cfb0ffeb9255d1ec72135d951cd1afd2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_get_extreme_points._get_point_get_extreme_points.return.points": {"doc_hash": "022fc8660fa5a0a7bbb569c5725383ddcfbdd967ceee69dcfbf2ee1f267c934b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_extreme_points_to_image_extreme_points_to_image.return.points_image": {"doc_hash": "7cbad6cd1f399a9cede555dbce1e817bd63c733074389e50a046a67c49e3308b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_map_spatial_axes_map_spatial_axes.return.spatial_axes_": {"doc_hash": "401674ece6c75a6a63bb76094ab1cd7c2c8f817a6fffb709e7c3b020437f8f49"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_allow_missing_keys_mode_allow_missing_keys_mode.try_.finally_.for_t_o_s_in_zip_transfo.t.allow_missing_keys.o_s": {"doc_hash": "bd773f6278991c309f9eb17ca98bd8e1c117e2a19ffdecca3df70a664dcf8ffd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_convert_inverse_interp_mode_": {"doc_hash": "558a615d1971931e2f3e06ef85a81fde838575c0fc5e5f94eef51160609a8f84"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/__init__.py_alias_": {"doc_hash": "33b2fa399cde5d601d3186db1caeb244b999eb8b17ca5a1c1cff162389097007"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/decorators.py_from_functools_import_wra_": {"doc_hash": "a1bffb82352f4ced8704caf80bbd43be2a5b23a8631e52de1e7b8205a9b5eaa1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/enums.py_from_enum_import_Enum___all__._": {"doc_hash": "fb340ec89ba52c6a091de7681b6cbc62e2bf19e61a2b9e701bf1ecd6d2ab1ade"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/enums.py_NumpyPadMode_NumpyPadMode.EMPTY._empty_": {"doc_hash": "f1a1cc2228b7313c42b6d113ca1a7a39c2d1cf611afb192d47e67ec04569656e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/enums.py_GridSampleMode_GridSampleMode.BICUBIC._bicubic_": {"doc_hash": "44e83d5b5369361cbfb8087e2f48a11d9dd6efd736e0c8d3c4fe9f4ddf658333"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/enums.py_InterpolateMode_ChannelMatching.PROJECT._project_": {"doc_hash": 
"0dc8d9f16dd52e913f157880da6f9f5a8a5abe524c0d21673aef65c1b5b4c023"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/enums.py_SkipMode_InverseKeys.KEY_SUFFIX.__transforms_": {"doc_hash": "f01225d2aed3860bd24b4d4d45936ba37989175a84e4809790a6a5be3acf66be"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/enums.py_CommonKeys_": {"doc_hash": "cc457bc76036cd77293350d7ae0bc95372459cd438333e1325c52b398caacced"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/jupyter_utils.py_from_enum_import_Enum_plot_metric_graph.ax_xaxis_set_major_locato": {"doc_hash": "cc1bab461b3d713e2df0c36a0029edc63ca4055d8d44e329771d14dee66b346a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/jupyter_utils.py_plot_metric_images_plot_metric_images.return.axes": {"doc_hash": "9c0af12dd66610336e39a279978d3b72960493f025d93c9526d7898ae100238a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/jupyter_utils.py_tensor_to_images_tensor_to_images.return._": {"doc_hash": "39dce084a6d9fa9969da8a476b9bed2b4cc1a92d5dffa2e79891e42d7bbedf0e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/jupyter_utils.py_plot_engine_status_plot_engine_status.return.fig_axes": {"doc_hash": "88468158c128a426bc4d569ed95dfe6875dfa1841b9d7ccf2dc5a7c48c2f4f0f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/jupyter_utils.py__get_loss_from_output_StatusMembers.LOSS._Loss_": {"doc_hash": "ff9fe6a9fa1ba472d063ab74d48c277dabea36ad6b602af4f0658c538375bb7b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/jupyter_utils.py_ThreadContainer_ThreadContainer.stop.self_join_": {"doc_hash": "15ca131e1ba5f2e418956d2d8a2e1acbb1e93274c798197556f5b3eb4ada207e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/jupyter_utils.py_ThreadContainer._update_status_ThreadContainer._update_status.with_self_lock_.self__status_dict_update_": {"doc_hash": "8c16841716a82c6e260843a11d2c37a05ab2a28d65e5261ea66d5f432119263b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/jupyter_utils.py_ThreadContainer.status_dict_ThreadContainer.status.return._join_msgs_": {"doc_hash": "7fe00d67591cad83fc6c6678ee30ddca7b38761f00ea413faaa3772e37e4cce5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/jupyter_utils.py_ThreadContainer.plot_status_": {"doc_hash": "28c11d839009d423284cf33c500b182b5d3f21d510c213a6bbbceb7eb6b574b8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_list_to_dict_list_to_dict.return.d": {"doc_hash": "08ab8a815be21eb58429ab13ac0fa97932c798ce319fb78bfe3cded727906400"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py__torch_to_np_dtype_dtype_numpy_to_torch.return._np_to_torch_dtype_dtype_": {"doc_hash": "5fbf8999cfe5e7c302c2a39a68ba77116f3f54152c7121a367e8c4ff1d05bb5b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_copy_to_device_": {"doc_hash": 
"f76a120d0f3419b49f84b0e3ceacd9b577dd0b35a48b70a4089376942e9dd36b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/module.py_inspect___all__._": {"doc_hash": "eb67d6ba854cd5e535a9cc28b7884fb42c91ce4f44aed53e78d0065485be4c48"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/module.py_export_export.return._inner": {"doc_hash": "e883d6e9af40f9ec5dcea8c01b0540448a28fe3c2fe6ad0c90b3a2277346b9f3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/module.py_get_full_type_name_min_version.return.mod_version_required": {"doc_hash": "3f0c12811a4e3e4d06a705b28f27f15ee695581033ce67a5ebd1d8cc2b5d0a2c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/module.py_exact_version_OptionalImportError._": {"doc_hash": "74ead79fe2423efc9f181f7c0f7240d3371f0c85641464b57628e8aa7a9dd442"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/module.py_optional_import.if_version_and_tb_is_None_optional_import.return._LazyRaise_False": {"doc_hash": "9b45b21f970cfe69498fc089deba68f0027e99bd554bb07cf11e3db4311a3629"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/module.py_has_option_": {"doc_hash": "34c462001d406616347b3f34de804a0445da7ebf090ff2d4a3a587c4a304d89d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/profiling.py_time_torch_profiler_full.return.wrapper": {"doc_hash": "7394c6792a6abb6167cc1cadf60768e3635d7f9bc71d67a74b3fca32b7cd8a4c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/profiling.py_torch_profiler_time_cpu_gpu_torch_profiler_time_cpu_gpu.return.wrapper": {"doc_hash": "b972a5f7065d7adf9ce1ef79a3d2444d898a30da37a44ef1960cd26ba8b0570e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/profiling.py_torch_profiler_time_end_to_end_torch_profiler_time_end_to_end.return.wrapper": {"doc_hash": "c34354fcaf5bfa493dd3596ab838969627fa5ee86e91f1e3ca8bd00611223fd7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/profiling.py_PerfContext_": {"doc_hash": "c37f83a0a69f0e755691c43da66e2d9dfb53c1031543016f24654aa665034d39"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/state_cacher.py_copy_StateCacher.__init__.self.cached._": {"doc_hash": "38d37b51965394c977c4771c5b2f8e3195799b4cc198726debd08cdbd7595562"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/state_cacher.py_StateCacher.store_StateCacher.store.if_self_in_memory_.else_.if_hasattr_data_obj_dev.self_cached_key_device_": {"doc_hash": "e18be937bf8b310e0189241572d4e7d7e1d9e240f901a7a92ce3395223c1ecaf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/state_cacher.py_StateCacher.retrieve_": {"doc_hash": "010bef2e0457eedf2a52fd8df94b54be23aaa65d3d4f7e8f73c1a0aa4cd6b7cd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/__init__.py_CAM_": {"doc_hash": "2f3198246bd7f28a97c75ee4a8313b1b82beb35f65ab365d3a7c764254736c13"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_warnings_default_normalizer.return.np_stack_x_axis_0_": {"doc_hash": "9e12059db6411a8a6874f260f629a9ba8a7308d5345e7b143f15976d89ae589c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_ModelWithHooks_ModelWithHooks.forward_hook.return._hook": {"doc_hash": "9b911886f99a587e92132508b05f596c937b278525c6a2baf999e3ba9079f1c8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_ModelWithHooks.get_layer_ModelWithHooks.class_score.return.logits_class_idx_sque": {"doc_hash": "0b5124338be12bbcb1cc0e15c30ade430419b1255a26538b6c2be096e4acd5e6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_ModelWithHooks.__call___ModelWithHooks.get_wrapped_net.return.self_model": {"doc_hash": "e832cdea3e7e946bce9fe74d10756bc54b83183b9fec771ec0913f45afdddb63"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_CAMBase_CAMBase.__init__.self.postprocessing.postprocessing": {"doc_hash": "eeabd5808cc33ad8abbbd6f42d564158e27129221b84bbf62fddb8c0f1bbb254"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_CAMBase.feature_map_size_CAMBase.feature_map_size.return.self_compute_map_torch_ze": {"doc_hash": "3e566ae150f59394c34789fca062c3250230fbbacf3a329b0ea5a98c17229979"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_CAMBase.compute_map_CAMBase.__call__.raise_NotImplementedError": {"doc_hash": "0082cf7a2ce1c8faedf68f82c150c66f6e575ad7838ef730e08cba97b6f9e6d5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_CAM_CAM.__init__.self.fc_layers.fc_layers": {"doc_hash": "ef1a51b76100f3b7dd938f38099238150da4e5a7d4f3270db56249acf0d623c9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_CAM.compute_map_CAM.compute_map._resume_the_spatial_dims": {"doc_hash": "2f931fb217a9cc1b1178703de0f98032da60c8ec3bebc76faf61cbfe065eaec6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_CAM.__call___CAM.__call__.return.self__upsample_and_post_p": {"doc_hash": "b444c105ebe410d0d269c96a5bca3e1545fdadd92bde96195ebf4b9ddb719d65"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_GradCAM_GradCAM._": {"doc_hash": "d414bb89365e064ba2e883c5b8e01d8abc5a8037b3fda21dab594d095225c87f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_GradCAM.compute_map_GradCAM.compute_map.return.F_relu_acti_map_": {"doc_hash": "c8f4b0524893cd78f4ee033e951551e6a586bfb33e951f9d24df5f55e06c4f29"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_GradCAM.__call___GradCAM.__call__.return.self__upsample_and_post_p": {"doc_hash": 
"a456adbb807489895098613f588a9914e49742c223de93273eb620fa28692f72"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/class_activation_maps.py_GradCAMpp_": {"doc_hash": "60f5e927262263c9f44972f4ba95424d679966bc9e8ae22bc7c8b5b174d6e27b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_from_typing_import_TYPE_C___all__._make_animated_gif_summa": {"doc_hash": "ac120468672a2a0cff45b71fc65c577d36c4a5207a21ba1a7d573486fba02a6a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/occlusion_sensitivity.py_from_collections_abc_impo__check_input_image.if_image_shape_0_1_.raise_RuntimeError_Expec": {"doc_hash": "da3b99080a2f99c856a753570534521d5f098d87220cc176bf94389d65e5b892"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/occlusion_sensitivity.py__check_input_bounding_box__check_input_bounding_box.return.b_box_min_b_box_max": {"doc_hash": "591043d1e2e08538092fe19c9a8ce913185dedc87eaf39b7e3acf142fbc483f6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/occlusion_sensitivity.py__append_to_sensitivity_ims__get_as_np_array.return.out": {"doc_hash": "30bff1af47f08056777f3d8218b66dae031f6c36d8d2dc57daea32634729571c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/occlusion_sensitivity.py_OcclusionSensitivity_OcclusionSensitivity._": {"doc_hash": "50ba1347ecb9ee3e59edb556846f4da090f450baa7648a1d8ba9c10e6ebff095"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/occlusion_sensitivity.py_OcclusionSensitivity.__init___OcclusionSensitivity.__init__.self.verbose.verbose": {"doc_hash": "c8dcffb0ccf608925e76531576f651271265fb3a189a00db9a7b90d148da9cab"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/occlusion_sensitivity.py_OcclusionSensitivity._compute_occlusion_sensitivity_OcclusionSensitivity._compute_occlusion_sensitivity.verbose_range.trange_if_self_verbose_el": {"doc_hash": "47b9fedd819b89e17d931394ae649f065afa5296a291893423bb219bb6c8a318"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/occlusion_sensitivity.py_OcclusionSensitivity._compute_occlusion_sensitivity.for_i_in_verbose_range_nu_OcclusionSensitivity._compute_occlusion_sensitivity.return.sensitivity_ims_output_i": {"doc_hash": "a8fff341d9677dc03d4c9fd92081e9cd65c2255b49a4e3c0da37a3d4a52c8b39"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/occlusion_sensitivity.py_OcclusionSensitivity.__call___": {"doc_hash": "f254202f218da2e1ff22d416441d706a7b4d13f7f0939d237ed7d5af3b4c610d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/visualizer.py_from_typing_import_Callab_": {"doc_hash": "60f5f1aaa7bb59fce599092ca32fc1ff67ad3b5935f4ed6bc3c60c82c39acb6a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/min_tests.py_glob_run_testsuit.assert_sorted_exclude_cas": {"doc_hash": "c64a0049d18e8d46368000149cb572600204ed412b3dd6c0df74c42db8aa987f"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/min_tests.py_run_testsuit.files_run_testsuit.return.test_suite": {"doc_hash": "9689b1f0df10b717626ff1b5bbe0d64094e5f04b8ca59c1ecda9bbad3dfd60d9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/runner.py_argparse_TimeLoggingTestResult.stopTest.super_stopTest_test_": {"doc_hash": "e03de69287e6e50c36383cd13491755d00e1786428d890152106bd4d322608b7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/runner.py_print_results_print_results.print_Remember_to_check_": {"doc_hash": "45fb95fd43502826c953cb71e9e17be236b33bc411e684e5d1f2c28974be77b0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/runner.py_parse_args_parse_args.return.args": {"doc_hash": "034c55366badf38ae1f39c4e80251cdde4fc08a962be2d245e74a5d7a06f81e0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/runner.py_get_default_pattern_": {"doc_hash": "9180f675def6ad59c3218cf9bbca478a5db9fa9808d06267dbadc01ded655e8c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_activations.py_unittest_TEST_CASE_5._": {"doc_hash": "0cc6a1746a3b3c2e9a295bd76a822885c04fc882d839b79e443ec88eec7adaf6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_activations.py_TEST_CASE_6_TEST_CASE_6._": {"doc_hash": "343e787acc30226ac89cd77d36a146c63b2921664aad35f08900d92c9f66540e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_activations.py_TestActivations_": {"doc_hash": "f31dd90f7c88c370faaef7b59a220a1c8130a2491d14acb7cb07474a64a1b4f4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_add_extreme_points_channel.py_unittest_": {"doc_hash": "b183af5e2c193c63d94b6092d8c6b2a9ce62c2c13f39d7fb6568e205a477c88f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_add_extreme_points_channeld.py_unittest_TEST_CASE_2._": {"doc_hash": "21ffb87984677b4f17f1ed9c76c73240fd3dea417a54de247530dd52da045dd8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_add_extreme_points_channeld.py_TestAddExtremePointsChanneld_": {"doc_hash": "c49b4f04f2c368a3f9cfd2f224a40bb66aba408a7ac185f7903bed8c80b8e6cd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adn.py_unittest_TEST_CASES_3D._": {"doc_hash": "63512f1619c3a62fddcb457740a665cbd99e7cbb9a4ad85344b3eea62b09d547"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adn.py_TestADN2D_TestADN2D.test_no_input.with_self_assertRaises_Va.ADN_norm_instance_": {"doc_hash": "5f7188468be46901dd97e99183375ab449dba676e4ff5dbea11bd69081152cc5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adn.py_TestADN3D_": {"doc_hash": "a1fa29e9cda63a54d60629cc621ae7686761607a26d6ae3fffa51c9ce0689757"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affined.py_unittest_": {"doc_hash": "903e5c281d7bb8b17ce5e6644ad6966178f38ea0c2710e7ae1be9c0f3456d527"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ahnet.py_unittest_TEST_CASE_MCFCN_WITH_PRETRAIN_1._batch_5": {"doc_hash": "17f29015996aab874a7ba7815c10b8dff084fd1a0bd9b1c1cd39d0bbc9ee6b0a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ahnet.py_TEST_CASE_MCFCN_WITH_PRETRAIN_2_TEST_CASE_AHNET_3D_WITH_PRETRAIN_2._": {"doc_hash": "0b879c752ab02b2a539a48a20412ef8a975a3d55f12fb840adb775498b86c2b6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ahnet.py_TEST_CASE_AHNET_3D_WITH_PRETRAIN_3_TestMCFCNWithPretrain.test_mcfcn_shape.with_eval_mode_net_.self_assertEqual_result_s": {"doc_hash": "cf01523dd311a76f019d8d19a188b6241b9ef3160e6722eaa944ce6be63faf11"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ahnet.py_TestAHNET_TestAHNET.test_ahnet_shape_2d.with_eval_mode_net_.self_assertEqual_result_s": {"doc_hash": "0396a14fb47f96126a074929e50a68ce542c12b9c47eda6ca62e81e6b18e6c07"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ahnet.py_TestAHNET.test_ahnet_shape_3d_TestAHNET.test_ahnet_shape_3d.with_eval_mode_net_.self_assertEqual_result_s": {"doc_hash": "36425d7f55f5259aa3069743e87fd0793e9894fe2999bbaa88c812867b9e0dfb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ahnet.py_TestAHNET.test_script_TestAHNET.test_script.None_1": {"doc_hash": "174623bc0d8b7959f686c8a8153c2ba510e67bfeed9fc4a1e43478559bfaa5e5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ahnet.py_TestAHNETWithPretrain_TestAHNETWithPretrain.test_ahnet_shape.with_eval_mode_net_.self_assertEqual_result_s": {"doc_hash": "c91706a1cc283c2d9a382fd80ae148a560ec275cabe30feefabf4573c0af22ec"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ahnet.py_TestAHNETWithPretrain.test_initialize_pretrained_": {"doc_hash": "276fa30c0ae157dd4d9546a54612ea380f3bcba2e677390b64e805d71678532f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_os_TEST_CASE_4._Compose_LoadImage_image": {"doc_hash": "c509e37e3c69ff4f0566e3bbaf32f3328c7a696b68895a797bdcf1735c591282"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_autoencoder.py_unittest_TEST_CASE_FAIL._2_channel_2D_should": {"doc_hash": "46acedd8d090a0cc5bca2f29695940ec11f8aadb6662281028fb2e0b1caf7473"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_autoencoder.py_TestAutoEncoder_": {"doc_hash": "6c79a68d110e16688ca9c27d00df94b2f20477b78507d99d782b54672a2aea21"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_basic_unet.py_unittest_CASES_3D._": {"doc_hash": "6048e2b312c225f5443bd9d0b3123e67e4f449a66443ca3e12d3d77c340ee505"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_basic_unet.py_TestBasicUNET_": {"doc_hash": "4ecc64def24893b534aa269af5b5cb0df282df645d19d48b894585c6351fece9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bending_energy.py_unittest_TEST_CASES._": {"doc_hash": 
"81500d6910ebc1fc666cfa6b0dced5224b718b4eb9bc4e77d9bb0ef4effbb828"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bending_energy.py_TestBendingEnergy_": {"doc_hash": "702b2869ed6620da24f9044554321f70ca94842279feb4902343ffbc037bbb2b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bilateral_approx_cpu.py_unittest_TEST_CASES": {"doc_hash": "8409a9af264de3c2091853535e634e27245c9dc2f0c90b10dc359e9a38527d6f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bilateral_approx_cpu.py_BilateralFilterTestCaseCpuApprox_BilateralFilterTestCaseCpuApprox.test_cpu_approx.np_testing_assert_allclos": {"doc_hash": "64513b1b873d112bde7d39977941b273a51d20919a0d14123452ef0be0dd5d04"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bilateral_approx_cpu.py_BilateralFilterTestCaseCpuApprox.test_cpu_approx_backwards_": {"doc_hash": "f5bab8fcc2ebd83632fc5ac91a4046179dc93661e01a30f6c5c5beb2eac7dd7a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bilateral_approx_cuda.py_unittest_TEST_CASES": {"doc_hash": "2dd0c4f23de741d12c607c3aef460b51d676391426ed7cf33a6b4cac46746fba"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bilateral_approx_cuda.py_BilateralFilterTestCaseCudaApprox_BilateralFilterTestCaseCudaApprox.test_cuda_approx.np_testing_assert_allclos": {"doc_hash": "d4f847d4081baf43629cca1636304ff55ca32be8a0ff32106855a67608d49d0c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bilateral_approx_cuda.py_BilateralFilterTestCaseCudaApprox.test_cpu_approx_backwards_": {"doc_hash": "5166551f74cdb0374c952619eaf9b14eb867b5ce61cc3df63418a230aed9e06d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bilateral_precise.py_unittest_TEST_CASES": {"doc_hash": "19bd757e329604494fb14731780a104993976323e68945fa1cf52b62f41a47d8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bilateral_precise.py_BilateralFilterTestCaseCpuPrecise_BilateralFilterTestCaseCpuPrecise.test_cpu_precise.np_testing_assert_allclos": {"doc_hash": "c56a955bc5d99bb2b1dbd15d10eee2b2e07818770ec723a091a0017235307736"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bilateral_precise.py_BilateralFilterTestCaseCpuPrecise.test_cpu_precise_backwards_BilateralFilterTestCaseCpuPrecise.test_cpu_precise_backwards.gradcheck_BilateralFilter": {"doc_hash": "6287540b76d54ca5ebce06d04b066b500d0c8cb952c7ff26057f41029abba398"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bilateral_precise.py_BilateralFilterTestCaseCudaPrecise_BilateralFilterTestCaseCudaPrecise.test_cuda_precise.np_testing_assert_allclos": {"doc_hash": "cfcf10f2388150015514c1cdb9be8059af991a129692ab78b04db8747521d719"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bilateral_precise.py_BilateralFilterTestCaseCudaPrecise.test_cuda_precise_backwards_": {"doc_hash": "5e38a307eb72c87dcf916b300ad3767f8c08ba77c75c4b24bc952b14ab447c2c"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bounding_rect.py_unittest_TEST_CASE_3._2_16_20_18_0_16": {"doc_hash": "1b6204b6d79d3b48547bafaca49a0faf91804225d28b873a49ad99548767c12c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bounding_rect.py_TestBoundingRect_": {"doc_hash": "991d37874b31e6ebbd53ad11f7ee5a404f8a4fa35c843e241519f564434f9c7b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bounding_rectd.py_unittest_TEST_CASE_3._2_16_20_18_0_16": {"doc_hash": "e25254586f6bc8a24fe4bab31a3649029437fad2e7aa0d56df32a63d75193983"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_bounding_rectd.py_TestBoundingRectD_": {"doc_hash": "5c1e796963925d50f6035ec9843c40c9dd6270789f42ee2e4089eafb1b8ebc5d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cachedataset_parallel.py_os_TEST_CASE_3._4_5_None_": {"doc_hash": "d9980ea0a094e1a03225c61d2e1f98f194057632e0d2a53a90e87a48257a5a19"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cachentransdataset.py_os_TEST_CASE_1._": {"doc_hash": "4f7d522052e1dcbf2ef5096cb959ff06ddc77192597217a5ff2fd7290166d0f9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cachentransdataset.py_TestCacheNTransDataset_": {"doc_hash": "aa35c28fae74895210a496ab4269a5a6564baa03e1c2382b8d967ce60346683e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_center_spatial_crop.py_unittest_TEST_CASE_3._": {"doc_hash": "90b3917fbea1aa2b38e5bb800d6aa409170cf1aefc1be31c24d5a95d2f0a5ac3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_channel_pad.py_unittest_": {"doc_hash": "da05df37d647d0a60f3b115ce4c9ad79e88eed101ab7a6ae6772552607ac5d2e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_TestCompose.test_data_loader_2_TestCompose.test_data_loader_2.set_determinism_None_": {"doc_hash": "120d86d13d778424aeaa08dc0692ee40e22ea316eaf756eb681cf9545802ee9c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_TestCompose.test_flatten_and_len_": {"doc_hash": "ba0a329aaf44d6ec521dec17f8f1e639b8743a4d51eea57a0c0640411efb5c82"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_confusion_matrix.py_unittest_data._": {"doc_hash": "52b88d1ebc36d8bda4018e8dffb56d2a7cc69095a912d7b929708b9f291e95d3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_confusion_matrix.py_data_nan_TEST_CASES_COMPUTE_SAMPLE._": {"doc_hash": "2ec7bc2aa4831a15392d4cb32a205ed622abe265e6cbcd5bec11dcfc1a023d56"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_confusion_matrix.py_TEST_CASES_COMPUTE_SAMPLE_MULTI_METRICS__one_input_to_compute_mu": {"doc_hash": "a5b110f536cd85bed9413655b02dad6732d4db5c5731a61a4a9c0fdc40854255"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_confusion_matrix.py_for_reduction_in_mean__TEST_CASES_CLF._data_clf_copy_result_": {"doc_hash": 
"233c969279e62716c36b6e5660d9b7cbfa1740c929aee9e74b11c4160b316d4b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_confusion_matrix.py_TestConfusionMatrix_TestConfusionMatrix.test_compute_sample.np_testing_assert_allclos": {"doc_hash": "0fd0130fd1f7b08a9a99ff10fe927e1aeeb68a3b01f195fa1355c26d800b8e0d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_confusion_matrix.py_TestConfusionMatrix.test_compute_sample_multiple_metrics_TestConfusionMatrix.test_compute_sample_multiple_metrics.for_idx_in_range_0_len_r.np_testing_assert_allclos": {"doc_hash": "820cdf745736cd83d87e673f498e855f9f9c4a14af3ba1595d7a288322464c31"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_confusion_matrix.py_TestConfusionMatrix.test_compute_sample_with_nan_": {"doc_hash": "9e997a90f4c8e7f9f885a58b686a9b9fb20e697f3580baab9727faf9c84d664a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_froc.py_unittest_TEST_CASE_5._": {"doc_hash": "70427c8438fe764edfa14ede95753c86fe407f52d7e1d49ac0e47677bd45cf35"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_froc.py_TestComputeFpTp_TestComputeFpTp.test_value.np_testing_assert_equal_n": {"doc_hash": "cbbe76c8fd6998e918e1b930f5db7cd098bba71d84ea15aed5ac1e2ad481bbf9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_froc.py_TestComputeFrocScore_": {"doc_hash": "515ff4594e69adbe9e50565f063fdd512c4b06b5f53e1a513e422bfbc46bfb7a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_meandice.py_unittest__should_return_Nan_for_a": {"doc_hash": "ae4845513561748106337616e02fabeda7164276f15545bff8d24893e377a758"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_meandice.py_TEST_CASE_3_TEST_CASE_4._": {"doc_hash": "35eb4719746a0f44728cdc1d3eb2dfcf96f57c8d2b87d1937a26b972fb6f0375"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_meandice.py_TEST_CASE_5_TEST_CASE_6._": {"doc_hash": "c690ab90d66393bbbdaee3077437ab93b1869d1f26a2309013bd6e5a2bd9927e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_meandice.py_TEST_CASE_7_TEST_CASE_9._": {"doc_hash": "89bee08190ae0fd96f424e78f228deb84d2b1f46cb91c6f8f0aca2a68c6f1d6d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_roc_auc.py_TestComputeROCAUC_": {"doc_hash": "9f2f205b1d9c508a0f891ebd21d3a2a722dcb56b4e0487f2c47666a06a7f4e8a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_concat_itemsd.py_TestConcatItemsd.test_numpy_values_TestConcatItemsd.test_numpy_values.None_2": {"doc_hash": "eb8c9b20e98f523f78a35accc8d5f8ae17a3b16cc7eb9a0ef614c3135ee8afdb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_concat_itemsd.py_TestConcatItemsd.test_single_numpy_TestConcatItemsd.test_single_numpy.None_1": {"doc_hash": "216834c086ebfb7c8eb7cdfc959b1d78628af5e2d5a1f8a0a8cd96e16b8f916a"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_concat_itemsd.py_TestConcatItemsd.test_single_tensor_": {"doc_hash": "aa42c2d69a60918c0308c86f5c4812f989528b66a84a57f0cac8297d0f6ad3dc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convert_to_multi_channel.py_unittest_": {"doc_hash": "3716aa38760b036b770e0eff07269a1c847c8116f3b6c9acbd6969b234c64576"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convert_to_multi_channeld.py_unittest_": {"doc_hash": "d46d6bb2f88b69ab79ca48cffd97eb772d0b15f7d4c8033f7b499df33cadf1fd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_copy_itemsd.py_TestCopyItemsd.test_array_values_TestCopyItemsd.test_array_values.None_2": {"doc_hash": "086fe39aed3159055907659b645c07d205aac09d4ff9e04b604861717bd9ddb0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_copy_itemsd.py_TestCopyItemsd.test_graph_tensor_values_": {"doc_hash": "25ef74fd0f95229880c2105425a728832a702fe520fefc7665b2e96bfc1e1c20"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crf_cpu.py_unittest_TEST_CASES": {"doc_hash": "ef90ec450e5f6deefa57dd184803629868d7c917627fc46c9e8ded390acc6466"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crf_cpu.py_CRFTestCaseCpu_": {"doc_hash": "155e43d67dab8a1a944a1d7f7c155f9d1f85e0ec358bfa3ad23ce6b059a9c987"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crf_cuda.py_unittest_TEST_CASES": {"doc_hash": "90ad35b5113a9fd589bd359ff6b2bfcad515cdf72cf3e156002f2d17bef7d68b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crf_cuda.py_CRFTestCaseCuda_": {"doc_hash": "0ede628f4920dcb2d5e31de30fe2c7551f677804206ae036dc22db9ddbab80c8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crop_foreground.py_TEST_CASE_5_TEST_CASE_5._": {"doc_hash": "7603f9613e04007fb8a6520fdd74ee9d6a183356eb9df9a614dc6d94bd4b426d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crop_foreground.py_TestCropForeground_": {"doc_hash": "e4cc20ed43f5f0b10db2c6c01d4e6ef20cb40c413a31ab7cdc2ab1bcb3185d91"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crop_foregroundd.py_TEST_CASE_4_TEST_CASE_5._": {"doc_hash": "9aa553871c9ff0e213b0800fa49c167356bb1ed8eddb070c7fc0b0f4677f3a05"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crop_foregroundd.py_TestCropForegroundd_": {"doc_hash": "33659be5fde0eec0f57061028b6638ff037020d59ebfac8771a92990c8d4f68d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cross_validation.py_os_": {"doc_hash": "cefb19043c777f6a7e6b675d49d75a6c52d3a272972ea4ad79edb0e0fb58d8e4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cuimage_reader.py_os_TEST_CASE_RGB_1._": {"doc_hash": "0b17ece2d80face5fe7ee639e096f15919bdb90873f777decf606b7850c37c58"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cuimage_reader.py_TestCuCIMReader_TestCuCIMReader.test_read_patches.self_assertIsNone_assert_": {"doc_hash": "0960268ae0d7db3048d0da20b5053fff461ad288ae815e149ccc305abbdf4432"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cuimage_reader.py_TestCuCIMReader.test_read_rgba_TestCuCIMReader.test_read_rgba.None_1": {"doc_hash": "800107c8dbb217900f0939a55d5246ba71e8c84ae6ea7657421322b0ff740aa5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cuimage_reader.py_TestCuCIMReader.create_rgba_image_": {"doc_hash": "c4e1696d428a9bfdd694adb300321aeb4027ebe814982e2507d10c0fa46fbc79"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_data_stats.py_logging_TEST_CASE_6._": {"doc_hash": "1d4e91067921d98e3f43e053f3b17b912e085dcd4e496396af4d00ff4018cf74"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_data_stats.py_TEST_CASE_7_TEST_CASE_8._": {"doc_hash": "9290f92a86e257f607bd7e25dba04306e11f8301e0c21d860c5c5d1cf020cc34"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_data_statsd.py_TEST_CASE_7_TEST_CASE_9._": {"doc_hash": "ae830adf764c385b48493f4bc5c23da645a7f2862d65da5c88f0f3fcf243f16e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dataloader.py_sys_TEST_CASE_2._": {"doc_hash": "2287c6534f7e94fcb4538b59866bd3923563ad7b7e9e355db32cc2ae7030668a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dataloader.py_TestDataLoader_": {"doc_hash": "8986ad209d924790b139f244d69e4eda9b7df9a12e040b4a1d2505aa57601ca0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_decollate.py_sys_TESTS_append_RandAffined": {"doc_hash": "059987f424a91e9354dd3c0e13d9e8ea841dc73f4515f5f8097604eee33c2f44"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_decollate.py_TestDeCollate_TestDeCollate.check_match.if_isinstance_in1_dict_.else_.raise_RuntimeError_f_Not_": {"doc_hash": "fc5c6769ab7534bf00acc58e84e3d9fc3bc080d93eb77088c7fbcf6ef6264046"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_decollate.py_TestDeCollate.test_decollation_": {"doc_hash": "429ae00541b7e983608c984c0155db75a7d639b58542b05cb76ecd1dbbd403c5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_dataset.py_os_TEST_CASE_8._": {"doc_hash": "79849522d6bbb10b8810f092a3cccfdfe555906771cd256e59eec01017f97ef0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_dataset.py_TestCreateDataset_TestCreateDataset._create_data.return.datalist": {"doc_hash": "5da54030bf8ce548661604527b3a1896c7fedc6452f120a15a647a57f4b67fd0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_dataset.py_TestCreateDataset.test_create_dataset_TestCreateDataset.test_create_dataset.if_expected_region_is_not.self_assertEqual_deepgrow": {"doc_hash": "737d601f213d0b30ce8101a6f36eb23f52a99c047ae8d872500caaba55861f5f"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_dataset.py_TestCreateDataset.test_invalid_dim_": {"doc_hash": "d0d7ccd647b63c78082eed3e12f1d115924a1dc53cc77bda014958944fed0a30"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_interaction.py_unittest_": {"doc_hash": "57c22f4d5a1fabfb0d5e4b6b9ac9d148b59281e33bfc916ae745df222f0c01d2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_transforms.py_unittest_DATA_1._": {"doc_hash": "a76c8c89d408a61fd085f4653f59804b894481b0f94e1ff317d93634c11105a1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_transforms.py_DATA_2_DATA_4._": {"doc_hash": "e89b70983bc6a0f46a11bbc0d7bbb08584ce0b5c3f51b8d462173dda9ed14ef4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_transforms.py_DATA_5_DATA_10._": {"doc_hash": "9a5173b7eca09c7a4869b6b7e0165ccd004454da08aa302924431b4c54f28ff1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_transforms.py_DATA_11_ADD_INITIAL_POINT_TEST_CASE_1._": {"doc_hash": "ad708c7879e57df6f882ce4602cbbc550d5f28487048727dfcfeb55b82241bb0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_transforms.py_ADD_GUIDANCE_TEST_CASE_1_ADD_GUIDANCE_TEST_CASE_1._": {"doc_hash": "9d6ec555eaa00b5ed222b5bb5029e4a2aa59d81a3d3449a5826f772944f21ab9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_transforms.py_FIND_DISCREPANCY_TEST_CASE_1_ADD_GUIDANCE_FROM_POINTS_TEST_CASE_6._": {"doc_hash": "6eaa916b76b57441f9133b8012d584de4cdce1a79c62cd54b1c61180431816ae"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_transforms.py_SPATIAL_CROP_GUIDANCE_TEST_CASE_1_RESULT.np_zeros_10_20_20_": {"doc_hash": "ff53e11469c2f6718d8c8d438227f60b5f0462d4bf3bcda25fd670095554c610"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_transforms.py_RESULT_4_8_4_8_4_8_n_TestFindAllValidSlicesd.test_correct_results.np_testing_assert_allclos": {"doc_hash": "c9ef4e884b4a600a9c821f76da15eeefcfb0930bc0985e59e51c87029275b851"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_transforms.py_TestSpatialCropForegroundd_TestSpatialCropForegroundd.test_foreground_position.None_3": {"doc_hash": "61338795ce120eb3b980cc4f53cd83e137cd942ac6cd481970e3e62ea8febeeb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_transforms.py_TestAddInitialSeedPointd_TestAddRandomGuidanced.test_correct_results.np_testing_assert_allclos": {"doc_hash": "942b7dc96e127cc2ef4cfbf0deadb8e7d181ec5ef1282ef2431a678ce5d51e87"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_transforms.py_TestAddGuidanceFromPointsd_TestAddGuidanceFromPointsd.test_correct_results.None_1": {"doc_hash": "71f29d9215129e59cf9979dd2eb837a598dccc4623dc518c2ff612a3c4f5ff5f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_deepgrow_transforms.py_TestSpatialCropGuidanced_": {"doc_hash": 
"9142943d8ccb08ce00fbef4abf403a12ef9d7f017ca6e0e8415d93b1f9c90b8c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_densenet.py_unittest_TEST_PRETRAINED_2D_CASE_3._": {"doc_hash": "e2c1b332fed4c09ccb4848b6e9b29edb29d191f1e7888f5dee1380375c03e935"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_densenet.py_TestPretrainedDENSENET_TestPretrainedDENSENET.test_pretrain_consistency.self_assertTrue_torch_all": {"doc_hash": "d4d6f455bd382f3789138bb2159dfbdd26b8eec3199f25369b8e0919f84daf81"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_detect_envelope.py_unittest__MULTI_CHANNEL_VALUE_TES": {"doc_hash": "eb0ace45346d0784338a55cde16de45d33d8d2a8847038ba575ca570825930a7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_detect_envelope.py_TEST_CASE_2_CHAN_3D_SINE_TEST_CASE_INVALID_OBJ._a_string___call_": {"doc_hash": "ea91aa8bdee945d471ee33787dc67339d9371eeaaba17f2a69d83b47419c30ac"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_detect_envelope.py__method_expected_to_rais_TestDetectEnvelope.test_value.np_testing_assert_allclos": {"doc_hash": "25948980d7c5c0e85ac32e564088864062039ab95433defe0a4acb0e9e131b20"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_detect_envelope.py_TestDetectEnvelope.test_value_error_TestDetectEnvelope.test_value_error.if_method___init___.else_.raise_ValueError_Expecte": {"doc_hash": "8f081ca438cecc198fafd24069b36fdd4cc3089c01bcccb3f155e12ca989b1eb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_detect_envelope.py_TestHilbertTransformNoFFTMod_": {"doc_hash": "079feea90e86b848dd3dd182ce5a6435cd4d5e169916b3d2cd9f84c59313f22e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_ce_loss.py_unittest_TEST_CASES._": {"doc_hash": "53478ec28947c804d70c4327ed5efc84fb6f105a6c3b5cddfceb841bdfde39f8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_ce_loss.py_TestDiceCELoss_": {"doc_hash": "4fc416812ac08833e39a331a9dbe2b81b23fe7b0d6527d911591488917f70e78"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_focal_loss.py_unittest_TestDiceFocalLoss.test_result_onehot_target_include_bg.for_reduction_in_sum_.for_focal_weight_in_None.for_lambda_focal_in_0_5_.np_testing_assert_allclos": {"doc_hash": "4355dd2d81ca5622daa02231a7e85efcba9083b418c02e0021678c7c4907406d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_focal_loss.py_TestDiceFocalLoss.test_result_no_onehot_no_bg_TestDiceFocalLoss.test_result_no_onehot_no_bg.for_reduction_in_sum_.for_focal_weight_in_2_0_.for_lambda_focal_in_0_5_.np_testing_assert_allclos": {"doc_hash": "c0a68a06792bad78a980562c944e23911fdb56826fa53647013e99e057160648"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_focal_loss.py_TestDiceFocalLoss.test_ill_shape_": {"doc_hash": "f74c57054eb9bf192f89561817d2e8471271cca53ba368c421baae188825b2bd"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_discriminator.py_unittest_CASES._TEST_CASE_0_TEST_CASE_1": {"doc_hash": "817be37c362e0de355c165905acf6e6fb01d8938fa4d9c5312a21b8e0aef9dad"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_discriminator.py_TestDiscriminator_": {"doc_hash": "9f7bf0f8a6aadb4872736d55ad84a0419e47cc7434aa0784bdd6f6f7b74a8877"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_distcall.py_unittest_": {"doc_hash": "34e940a513ea6baa47bd0a4a47b744ebbeafd0ab348e2a3d7ea72723aaccd8d0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_distributed_sampler.py_unittest_DistributedSamplerTest.test_even.if_dist_get_rank_1_.np_testing_assert_allclos": {"doc_hash": "24bd0b32a9ed1904c39edd93fb4a69f3523579b7ffc60cec4ed39171cb583225"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_distributed_sampler.py_DistributedSamplerTest.test_uneven_": {"doc_hash": "7e43e8bb1259a8b357793b4486aaa023521416513c7a61fc94c7e908f7120d5a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_distributed_weighted_random_sampler.py_unittest_DistributedWeightedRandomSamplerTest.test_sampling.if_dist_get_rank_1_.np_testing_assert_allclos": {"doc_hash": "ee20a93e0adbef665a00fa65ca1e7057f3970fa506ad2be29779197e357d7d4c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_distributed_weighted_random_sampler.py_DistributedWeightedRandomSamplerTest.test_num_samples_": {"doc_hash": "b5fa50d5a31aa15f5086c99dd5418d31dcfd3462cc024702541477bc6ace99e4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dvf2ddf.py_unittest_TEST_CASES._": {"doc_hash": "a29324dac815f31824d6a9862db6c5b346e08a6afa2c1a37764e22b749461c07"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dvf2ddf.py_TestDVF2DDF_": {"doc_hash": "995f6e4abbe8ca2548b1015b8adf923e89956cf4ce2324c55132a04c17671231"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dynunet.py_for_spatial_dims_in_2_3_for_spatial_dims_in_2_3.for_res_block_in_True_F.for_deep_supr_num_in_1_.for_strides_in_1_2_1_.TEST_CASE_DEEP_SUPERVISIO": {"doc_hash": "44da078c3b0334b6bcdfc37bef26f024935076e9cad8c86f4562104de095a30f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dynunet.py_TestDynUNet_": {"doc_hash": "329d61e5b0dc2374aefaf274736e95e4cadf945e02c41c7b6c7ce144cfeff39b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dynunet_block.py_TestResBasicBlock_TestResBasicBlock.test_script.for_net_type_in_UnetResB.test_script_save_net_tes": {"doc_hash": "dc27214c3d63c42d60c0bf53ce652337a30097026e782547f383c6f83fbface6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dynunet_block.py_TestUpBlock_": {"doc_hash": "31f8ba4f22985a2b7de60f3f704163a73178962baf4eb938bad8de8cda12dcdf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_efficientnet.py_os_get_model_names.return._efficientnet_b_forma": {"doc_hash": 
"8f53be4625e061a5d7c659821f7a29aaa546d16a5a869ab3b123a830e684c822"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_efficientnet.py_get_expected_model_shape_get_expected_model_shape.return.model_input_shapes_model_": {"doc_hash": "b4059cd6302e4dded310512b7b4bffea3d56da4699998007088daf8098f5cfe0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_efficientnet.py_make_shape_cases_make_shape_cases.return.ret_tests": {"doc_hash": "19450c2509dcaf8431527ddca569411bc6a1041b1d42df0ab9d273536925b1b7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_efficientnet.py__create_list_of_selected_None_28": {"doc_hash": "d65e262339c57bfe6b3cbf96ca5223ad42b3e10eacf4cd779867e6121ad61a7a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_efficientnet.py_None_3_None_4": {"doc_hash": "efe2517dc37266539aec8564e6d80b1209d29f7c84a88d516cb9f3d242c11c9e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_efficientnet.py_TestEFFICIENTNET_TestEFFICIENTNET.test_shape.self_assertEqual_result_s": {"doc_hash": "e539fd8b44978a7e57854212d1684390f50e853ac5f20f58dbc273155e3c6c6d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_efficientnet.py_TestEFFICIENTNET.test_non_default_shapes_TestEFFICIENTNET.test_non_default_shapes.for_candidate_size_in_non.self_assertEqual_result_s": {"doc_hash": "8e8c557d45b1af11b36601819fe25c307ac4aba9687a30b33072110bba2aa36f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_efficientnet.py_TestEFFICIENTNET.test_kitty_pretrained_TestEFFICIENTNET.test_kitty_pretrained.self_assertEqual_pred_lab": {"doc_hash": "c7f66e49ee5cc59ea6700a4a3e2f545190bbc596e3f18956dbd92985494f9d7b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_efficientnet.py_TestEFFICIENTNET.test_drop_connect_layer_TestEFFICIENTNET.test_drop_connect_layer.for_rand_tensor_shape_in_.None_1.self_assertTrue_abs_p_cal": {"doc_hash": "83100445e98f58a1e882b3a746e298e5efe7a106d91c983e64bc9fde1367a783"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_efficientnet.py_TestEFFICIENTNET.test_ill_arg_": {"doc_hash": "ea3325ab1ba16ce8c8702e7d7a959415a66f3b263eae4df0bad90e8bb286c088"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ensure_channel_first.py_os_TEST_CASE_7._": {"doc_hash": "7215e8802fd0b5ff6dd9b2d59a9c410de334f22a14e41a5841356700209d4fed"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ensure_channel_first.py_TestEnsureChannelFirst_TestEnsureChannelFirst.test_load_nifti.with_tempfile_TemporaryDi.self_assertEqual_result_s": {"doc_hash": "430a6446a6a4596cd36b8e727572dea9a533105a4f7673a8637ff1e04e150be9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ensure_channel_first.py_TestEnsureChannelFirst.test_itk_dicom_series_reader_": {"doc_hash": "7c3701e6364f38c8d1eca6aae636997728a938a1ff68b3031a9433e690f274dc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ensure_channel_firstd.py_os_TEST_CASE_3._": {"doc_hash": 
"1e45b4f6035d361e56adb5dafe521ac00bfdd99c131615fe4b738565a574b909"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ensure_channel_firstd.py_TestEnsureChannelFirstd_TestEnsureChannelFirstd.test_load_nifti.with_tempfile_TemporaryDi.self_assertEqual_result_": {"doc_hash": "5eca47f6efa93c16c411f5eef2d5e36a9664dc36667a94733b37c848e76faa20"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ensure_channel_firstd.py_TestEnsureChannelFirstd.test_load_png_": {"doc_hash": "52cd33c81d4ab24d3fc28eb8207c8ce8c267cb2b83d011bec092d5eb75150a8a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_enum_bound_interp.py_unittest_TestEnumBoundInterp.test_bound.self_assertEqual_int_b_ze": {"doc_hash": "c63883a1b38b761a80f257bafe87f88810fe4314f3c1c4ff5a5847f5a568280d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_enum_bound_interp.py_TestEnumBoundInterp.test_interp_": {"doc_hash": "d185320bf2a410f3f50471dd7564447f0dc0033d066c6b62959916f424308cfb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_eval_mode.py_unittest_": {"doc_hash": "afb089157622c0988787481ef83c348a399709a630e2ac4d5fd5620745a205de"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_evenly_divisible_all_gather_dist.py_unittest_": {"doc_hash": "12d1c5b953a7596d8ecb97621ca7a3f99baa38947de68a5152b72f0f5850643e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_file_basename.py_os_": {"doc_hash": "6475dade6350a7cc0828f83afd71b9c6eaa785bdb6a9c8a2d4a13aa25f1b29b3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_consistency_with_cross_entropy_2d_onehot_label_TestFocalLoss.test_consistency_with_cross_entropy_2d_onehot_label.self_assertAlmostEqual_ma": {"doc_hash": "16e7792b17f2a91092432188849df41d95ce0a806f597c01dfa89c4da841f01d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_ill_opts_TestFocalLoss.test_ill_shape.with_self_assertRaisesReg.FocalLoss_reduction_mean": {"doc_hash": "2568650b2b089b600ae8efa37a4fc888742b4f8764a334bb82c18e7f1d98c177"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_ill_class_weight_": {"doc_hash": "9d3ade0370a6ab151e4b7dd179b3e9f709d4142c3ca0e7cc325d1f9960797b33"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fullyconnectednet.py_unittest_VFC_CASES._VFC_TEST_CASE_0_": {"doc_hash": "dbd49b0b25937fd82bc4174b3aa155cfe0f941663fdb386912751996a241dd85"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fullyconnectednet.py_TestFullyConnectedNet_": {"doc_hash": "4760da3cb06f52e528166a3e9078df7b1e538a5a2e7ad50f028da625c73630a0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_unittest_TEST_CASES_SLOW._": {"doc_hash": "dadb586ac171bdf60a061139d0dd7d07fec4adf94ed19631bbf7207244843550"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_TestGaussianFilterBackprop_TestGaussianFilterBackprop.test_train_slow.self_code_to_run_input_ar": {"doc_hash": "e0bdd17b4bc817dd390f9de12d84389cba73e9ed7fbbc3a55224d302f6e14f1e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_wasserstein_dice_loss.py_unittest_TestGeneralizedWassersteinDiceLoss.test_bin_seg_2d.for_weight_mode_in_defa.None_2": {"doc_hash": "23989a8bd7d5c7b15db4a364e7a9747a6753643a7f985b200ecee86529797ecb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_wasserstein_dice_loss.py_TestGeneralizedWassersteinDiceLoss.test_different_target_data_type_TestGeneralizedWassersteinDiceLoss.test_different_target_data_type.for_w_mode_in_default_.for_t_in_target_list_.self_assertAlmostEqual_lo": {"doc_hash": "15f62c9635fcb84d165688fd1f46dcc3942b00e6df8050d706ac0af653da9a9d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_wasserstein_dice_loss.py_TestGeneralizedWassersteinDiceLoss.test_empty_class_2d_TestGeneralizedWassersteinDiceLoss.test_empty_class_2d.for_w_mode_in_default_.None_1": {"doc_hash": "317991eb6d1f3e2b79d3d0db35e5e4dbbf8656f4334a6fbd016920597551b188"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_wasserstein_dice_loss.py_TestGeneralizedWassersteinDiceLoss.test_bin_seg_3d_TestGeneralizedWassersteinDiceLoss.test_bin_seg_3d.for_w_mode_in_default_.None_1": {"doc_hash": "90f41cbb4209db4c16bb924bc58e0d3524bd6e293be2f4f9a712379058919d4c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_wasserstein_dice_loss.py_TestGeneralizedWassersteinDiceLoss.test_convergence_TestGeneralizedWassersteinDiceLoss.test_convergence.for_w_mode_in_default_.self_assertGreater_diff_s": {"doc_hash": "2779d299eb498bb2cc831e65fa9ac566c5575704637b9c99da52f853532176ee"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_wasserstein_dice_loss.py_TestGeneralizedWassersteinDiceLoss.test_script_": {"doc_hash": "c705ee5e223e0621d65957e2c1b1ebb121f51d983162ee01ef19cef187a8b3a6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generate_param_groups.py_unittest_TEST_CASE_5._": {"doc_hash": "88e4bd5d942d5a0a73985fece23b83296bb737dd0f34412a8b457414c7f87739"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generate_param_groups.py_TestGenerateParamGroups_TestGenerateParamGroups.test_lr_values.assert_sum_n_26_or_al": {"doc_hash": "563cae8626dd56c2f6a8512299c26c4ebfffc397d5bca73e86b94a6437da4126"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generate_param_groups.py_TestGenerateParamGroups.test_wrong_": {"doc_hash": "9092926b821be02fe70485bd8dd3b76d9e8a886729e2503ed2b197a365b9bc70"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generator.py_unittest_CASES._TEST_CASE_0_TEST_CASE_1": {"doc_hash": "c1ea74f8af7c44a214236913477f1669ec613789b7665e57d433887569ab7bdb"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generator.py_TestGenerator_": {"doc_hash": "316b71698d0a8248b0132bed6014e5cd53877d1d45df40f6d0e77d319276b148"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_get_extreme_points.py_unittest_": {"doc_hash": "fbfcc8ce9ce39c772b0b860369b7cbe95353e2fcbb82fd11211ee0c8406d94ec"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_get_package_version.py_unittest_": {"doc_hash": "f09e3a8f061d1291fdb1747fef687321a2d28206fb01e4cfce322f24e4586581"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_global_mutual_information_loss.py_unittest_TEST_CASES._": {"doc_hash": "b0f3674896af843f5c9bbfeed94cd328948602e4851c88dd883f76397a7ae924"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_global_mutual_information_loss.py_TestGlobalMutualInformationLoss_TestGlobalMutualInformationLoss.test_ill_shape.None_1.loss_forward_torch_ones_": {"doc_hash": "fea207ce48d91545549f2a77f02bb20e2d683466018ca629f969e158b244fd63"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_global_mutual_information_loss.py_TestGlobalMutualInformationLoss.test_ill_opts_": {"doc_hash": "3197bfbcea5b0627101c13fc135395225f09e150603758371284c2f1e3f1c893"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_globalnet.py_unittest_device._cuda_if_torch_cuda_is_a": {"doc_hash": "6ea4065cc0d311e1fe6966fe1390bd5a28a24d3abf041c3b2a698b4e52cdde93"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_globalnet.py_TestGlobalNet_": {"doc_hash": "39a2113372bb12674630d36de0d07964c14f7a92efe09fc981bb47e1eedde623"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_grid_dataset.py_sys_TestGridPatchDataset.test_shape.self_assertEqual_len_j": {"doc_hash": "fd852b2c28da21643f1a873302cc629dc73fcf05e1e1285f8c669f87780b90a3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_grid_dataset.py_TestGridPatchDataset.test_loading_array_": {"doc_hash": "73d4a0cf3544b98969adb7e7dfe9970ce034460cdb1bc509aa1bcbd232a185cc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_grid_pull.py_unittest_for_bound_in_bounds_.for_interp_in_interps_.for_input_g_in_True_Fal.for_grid_g_in_True_Fals.TEST_1D_GP_append_test_ca": {"doc_hash": "3cc104f4a295ce5512fd6c12e0aa0675b737eb38947691aecb8b7703bb23e44a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_grid_pull.py_TestGridPull_": {"doc_hash": "f9bff9e6076245730b20b33b00dd6e216779a8a3158428f191cb5882a359d8dd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_loader.py_logging_TestHandlerCheckpointLoader.test_one_save_one_load.with_tempfile_TemporaryDi.try_.except_ValueError_.None_1": {"doc_hash": "d19c811bfb071710327d34040bc3f45494fa90087fb6c4e2d7fe745bfc71d03a"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_loader.py_TestHandlerCheckpointLoader.test_save_single_device_load_multi_devices_TestHandlerCheckpointLoader.test_save_single_device_load_multi_devices.with_tempfile_TemporaryDi.torch_testing_assert_allc": {"doc_hash": "60119581d4b2c802b5bbfd6021e5e497f77583425b7ff023ad987d547a044fe5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_loader.py_TestHandlerCheckpointLoader.test_partial_under_load_TestHandlerCheckpointLoader.test_partial_under_load.with_tempfile_TemporaryDi.torch_testing_assert_allc": {"doc_hash": "a99dd85d272a656a53ecbb84bcc475f09c973c581788ac3714ea6e175ce10288"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_loader.py_TestHandlerCheckpointLoader.test_partial_over_load_TestHandlerCheckpointLoader.test_partial_over_load.with_tempfile_TemporaryDi.torch_testing_assert_allc": {"doc_hash": "566ed305f697820b43c2cc8f409a591b918d9158dedc4de109464f48be2a89eb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_loader.py_TestHandlerCheckpointLoader.test_strict_shape_": {"doc_hash": "c19979ba7a60f077a5c8a8b30a6445f0db4a8636780e8176e874f49e2cfe0e3b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_saver.py_logging_TEST_CASE_8._False_None_True_val_": {"doc_hash": "8a67c01f10ebdabb2640a39d695f9eb5392164adb5a8486583c89a25aae5280a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_saver.py_TestHandlerCheckpointSaver.test_exception_TestHandlerCheckpointSaver.test_exception.with_tempfile_TemporaryDi.self_assertTrue_os_path_e": {"doc_hash": "d7cf1dca11596522c146af99418624bd07e6fa6b02f9817712b88d005a416cfc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_saver.py_TestHandlerCheckpointSaver.test_load_state_dict_": {"doc_hash": "8fe658e6f1f5fcef38dc4440c22c0375c591b35eb782070bf839904f0e730c5c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_classification_saver_dist.py_csv_": {"doc_hash": "b2c92506adb6a30c268871d008cee75f856901f72483979cbb6514fd3bd61afd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_confusion_matrix.py_unittest_data_2._": {"doc_hash": "32516b203d341eb3f86004ec1b48784e0c84f2e0456d85c53fddb47dc8917105"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_confusion_matrix.py_TestHandlerConfusionMatrix_TestHandlerConfusionMatrix.test_compute.self_assertAlmostEqual_av": {"doc_hash": "0f3a1208be3c38ed9a9b717975f1f98a60e815263fd909ca1b5efc70c94b9ad8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_confusion_matrix.py_TestHandlerConfusionMatrix.test_compute_seg_TestHandlerConfusionMatrix.test_compute_seg.self_assertAlmostEqual_av": {"doc_hash": "67deb538dc86c52dc31829717a8b03ed0cc37e3d3b52a52347ae4189ddc17a1d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_confusion_matrix.py_TestHandlerConfusionMatrix.test_shape_mismatch_": 
{"doc_hash": "e514b09a6585fb2832d9f3d844a607243b7400d41b02d610aecf631970a4ae20"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_confusion_matrix_dist.py_unittest_": {"doc_hash": "f31a1c5233f4ed36cdabba6751f80fb1927284f70029caab7effa43cce669d30"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_early_stop.py_unittest_TestHandlerEarlyStop.test_early_stop_train_loss.None_3": {"doc_hash": "cfb5e75b7c77c59294d12a671479d55da676a407f4b7fcd3ec19944fdc010e84"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_early_stop.py_TestHandlerEarlyStop.test_early_stop_val_metric_": {"doc_hash": "708f6d0b28d8bb9d0612a4efb9be0c8f0ad237a93a95b8229d785299d232a380"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_garbage_collector.py_gc_TEST_CASE_2._0_1_2_Events_EPOCH_": {"doc_hash": "3876108e72653ed12a6e5ff0cb9f26177b6620d97108223c672c3cd6932ad2d5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_garbage_collector.py_TestHandlerGarbageCollector_": {"doc_hash": "2e26ea8e7c78f122345be87eec09b789b2b930517f725348b04e7dd9e153d190"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_hausdorff_distance.py_unittest_create_spherical_seg_3d.return.image": {"doc_hash": "1fa3018aaea3484c4c6e91bf5e615153eeef30405857b5da5ddf8a7eeb9a3373"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_hausdorff_distance.py_sampler_sphere_TEST_SAMPLE_4._sampler_sphere_zeros_sa": {"doc_hash": "4f2ec66ca129f7c6d3e6eeb3b94328e99b04e2577e9150ddfbc47c1871083e97"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_hausdorff_distance.py_TestHandlerHausdorffDistance_": {"doc_hash": "926b5b587451ad85b5487942af9091f16b457a2d7c3c928d029f121ebeacb401"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_mean_dice.py_unittest_TestHandlerMeanDice.test_compute.self_assertTupleEqual_tup": {"doc_hash": "90ef3033f33fd565135e0f3b7c5f7fbf6eb9578bcc7db62b798cb328947bb0a0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_mean_dice.py_TestHandlerMeanDice.test_shape_mismatch_": {"doc_hash": "84a2d616415c504ac05de8234d618f6e7d15079b9e89e34bb50fb6cf96f0edd8"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_metric_logger.py_unittest_": {"doc_hash": "625f490d0e1fc8bd29eccc46b21d637e61f65320231aa4777d95bd7449ba6aa1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_metrics_saver.py_csv_": {"doc_hash": "146d1b0adf96b9d6eb1fb931af348b77562a13ace7ed3a49a0e2dc9df761043b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_metrics_saver_dist.py_csv_DistributedMetricsSaver._run.engine_run_data_max_epoc": {"doc_hash": "265ca7632bf961c4c198f32cbe2c12e3afdbad3747052659e3df7dd4b2d20505"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_metrics_saver_dist.py_DistributedMetricsSaver._run.None_2_": {"doc_hash": 
"2aa910ae4e229908909c997cd98fcdfc68a17979be5233c407923519d855aee2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_parameter_scheduler.py_unittest_TestHandlerParameterScheduler.test_linear_scheduler.None_8": {"doc_hash": "a30237b607e4696544acb7fa41b965f14f23e5d6d19c10e9348f939beba07894"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_parameter_scheduler.py_TestHandlerParameterScheduler.test_exponential_scheduler_TestHandlerParameterScheduler.test_exponential_scheduler.torch_testing_assert_allc": {"doc_hash": "fda4ab2707e75b1794425587c25d7cd8a623a4330effd0f691cd4db3084a0361"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_parameter_scheduler.py_TestHandlerParameterScheduler.test_step_scheduler_TestHandlerParameterScheduler.test_step_scheduler.torch_testing_assert_allc": {"doc_hash": "819381223fdf1d4a3fc39cc14fe971bf2159310726a0355a79049bf506750bad"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_parameter_scheduler.py_TestHandlerParameterScheduler.test_multistep_scheduler_TestHandlerParameterScheduler.test_multistep_scheduler.torch_testing_assert_allc": {"doc_hash": "91c80da20175da576eed01191e1798f166d10eb2a037ce353895e6bf552f3ce5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_parameter_scheduler.py_TestHandlerParameterScheduler.test_custom_scheduler_": {"doc_hash": "2f1483fe5a87a6c8591d471117af05bce9e0229296c934c64118836ee4e9fe67"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_prob_map_producer.py_os_TEST_CASE_2._temp_image_inference_ou": {"doc_hash": "0c8224ed6904f6b27a2ffa8359a25a8b7d900ed746fd0f0e34246006d18d01c7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_prob_map_producer.py_TestDataset_TestEvaluator._iteration.return.batchdata": {"doc_hash": "e7ba15b3a41d3cf781c4e724499989275ecab4000e3d60b4c00d9903176c84dc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_prob_map_producer.py_TestHandlerProbMapGenerator_": {"doc_hash": "8d76338422fcfac35b01652daee07bde02cdb0b7494199119e28b1772894c8bc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_rocauc_dist.py_unittest_": {"doc_hash": "fc8fb50d5293258d5abe0b5f01fc90a1c714ce184378a3370339c48cd9cb3908"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_surface_distance.py_unittest_create_spherical_seg_3d.return.image": {"doc_hash": "979342b9ca24d68a3a4ee63053d5fd4737b8db985c56782e2418c0af9e44b81d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_surface_distance.py_sampler_sphere_TEST_SAMPLE_4._sampler_sphere_zeros_sa": {"doc_hash": "e3ea8bb0cedf250547c0bda35f4ced40252edc4938c343021f6dab1f65b2003c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_surface_distance.py_TestHandlerSurfaceDistance_": {"doc_hash": "3c01d61124f44771f1c43e6dde5deb6dc2727d545a647e19c5732e5ae70af2a1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_transform_inverter.py_sys_KEYS._image_label_": {"doc_hash": "8a2d9cd107f0be984704b1e0db81ee419ecb691b6c9d3c0fd7684c0fac4c0084"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_transform_inverter.py_TestTransformInverter_TestTransformInverter.test_invert._check_the_nearest_inerp": {"doc_hash": "c79559407428f40221f285b40718afa8e173129d5d3c7aa605b3cd822b752a98"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_transform_inverter.py_TestTransformInverter.test_invert.for_i_in_engine_state_out_": {"doc_hash": "3bb1d4daa7ee535ef18069c3635438ba7b72065aa187a28decedd552d85aff7c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hashing.py_unittest_": {"doc_hash": "a23e7c14f26ddcb1b27aa0924aa351b70e9e775d73daf8694155d139a7029864"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hausdorff_distance.py_TEST_CASES_TEST_CASES_NANS._": {"doc_hash": "246751121ca383bbcd21293c533550b0f04a32062ad20afd319cb116d3c1a4cd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hausdorff_distance.py_TestHausdorffDistance_TestHausdorffDistance.test_value.for_metric_in_euclidean.for_directed_in_True_Fa.ct_1": {"doc_hash": "2bf8d81a69532fa18b40c3a14a9628417086c714ecf53e5741801d8bc0aadb1b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hausdorff_distance.py_TestHausdorffDistance.test_nans_": {"doc_hash": "77be8122d57d2c09697d9c51b9087d161853fcc27a479f5addd032d8b518b8f2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_highresnet.py_unittest_TEST_CASE_4._4_channel_1D_batch_": {"doc_hash": "cd32c08a754182330baf36ca23740d2fe28341c534461f6bb311369f2302e817"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_highresnet.py_TestHighResNet_": {"doc_hash": "324a75ea6635ff25f6a7cc5d0656ab65b5c0e3b8aca57adb690ca1553dcf9d5c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hilbert_transform.py_unittest_create_expected_numpy_output.return.ht": {"doc_hash": "bca4decdd04f7ba518044c96e643e3b546a9b6d8bbcd4975d09a7ee0e14cda37"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hilbert_transform.py_cpu__GPU_TEST_DATA": {"doc_hash": "6229c9fe2df632739bf84b88e0d6398a53eec817e7187889216d4bc3de83e28c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hilbert_transform.py_if_torch_cuda_is_availabl_if_torch_cuda_is_availabl.TEST_CASE_2D_2CH_SINE_GPU._": {"doc_hash": "78ff870f733849bd2c32134bafcde2756ff4ac8b091e1311a37c5ab9326da660"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hilbert_transform.py__TESTS_CHECKING_PADDING__TestHilbertTransformCPU.test_value.np_testing_assert_allclos": {"doc_hash": "508e73fbc72108efd52de29f5b84182a4cbcaa7e801a439559b83ac54b62ab28"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hilbert_transform.py_TestHilbertTransformGPU_TestHilbertTransformGPU.test_value.np_testing_assert_allclos": 
{"doc_hash": "00e13709d30866edc78e87f29afdbd6eb6190a342acf60753b4ae8001d6cbde2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hilbert_transform.py_TestHilbertTransformNoFFTMod_": {"doc_hash": "0bbcf6297b8c260b78a8ebc817c1c83221cb5cb9a478dea7f72bbb3957a3d027"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_image_dataset.py_os_RandTest.__call__.return.data_self__a": {"doc_hash": "7a8139bf88f6db8ca44f7365ac93656941c55ebfe22604edb20bf3ff4bdfa4f0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_image_dataset.py_TestImageDataset_": {"doc_hash": "480fa650fe4678f6995d1af7a8dd229692698de0d508c12a6b7d65283d1c2dc1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_init_reader.py_unittest_": {"doc_hash": "4048a93218ca7bc53cc7bd823507ee6982a7ab836b4585c5a8f2c4ff614f8e0d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_classification_2d.py_run_training_test_run_training_test.model_filename.os_path_join_root_dir_b": {"doc_hash": "f45d4fc9ba1db5426c25bc146244eb01c9270780f7d94016a9e5812e5a013161"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_classification_2d.py_run_training_test.for_epoch_in_range_epoch__run_training_test.return.epoch_loss_values_best_m": {"doc_hash": "5fe3130e0dbc24d07dc4612c7198da99601a50ca02d9a3228bea6680ed1711d1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_classification_2d.py_IntegrationClassification2D_IntegrationClassification2D.setUp.self.device._cuda_0_if_torch_cuda_is": {"doc_hash": "4a6aaccc4275d31ad4e22fdac5ba09772370fb12dcac912f8d7856daeb89a2c9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_segmentation_3d.py_os_TASK._integration_segmentation": {"doc_hash": "29f9c7ab1ca7a4b2c92af1846c3a3624dee686fcd1786ee298a0aa2ee56b534d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_segmentation_3d.py_run_training_test_run_training_test._create_UNet_DiceLoss_a": {"doc_hash": "ffcf9404698111de003dc21f3173671f833db72571848fdd03218fdecb62332c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_segmentation_3d.py_run_training_test.model_run_training_test.return.epoch_loss_values_best_m": {"doc_hash": "234e9ca5ee1c57442d95b2da39722cddb293a2fae19c5cb171869eb39be5114a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_segmentation_3d.py_IntegrationSegmentation3D.train_and_infer_": {"doc_hash": "6a2e97c19f171e1c6bbdc35b9e20d601d15a3479353662960b3de6cad215bd65"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_sliding_window.py_os_from_tests_utils_import_D": {"doc_hash": "6f40b88b8862b1b701f5f3e43b5bb75e0dd0a7b5be662016f252121820261a41"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_sliding_window.py_TestIntegrationSlidingWindow_TestIntegrationSlidingWindow.tearDown.if_os_path_exists_self_se.os_remove_self_seg_name_": {"doc_hash": 
"2b1a7002f5e77a2eaf588adebb3c970ce4875e1dca27daaf6334a84689cdac1f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_sliding_window.py_TestIntegrationSlidingWindow.test_training_": {"doc_hash": "efc7e8b93ab102f10818cb54d739d65b96708f59e1efb8d4c61e1b5f3639dc7b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_unet_2d.py_unittest_run_test.return.loss": {"doc_hash": "6071e888947c694cefa3ff19d4c8998e4bd2a30efde2be33d77f0431bff83b0d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_unet_2d.py_TestIntegrationUnet2D_": {"doc_hash": "a3b39035d36da9469ac671225ce197dd1c8dd38a033cb0243aac2122cbeb8aa9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_logging_TASK._integration_workflows_": {"doc_hash": "604969726f237719914f18fd9ef1f2da78c82a9faead68dba21def628436fefb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_run_training_test_run_training_test._TestEvalIterEvents._forward_completed.pass": {"doc_hash": "eebba9597d3b809689a369939921308fd3e4eaef05fe68404df4402b875b4bf4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_run_training_test.val_handlers_run_training_test.train_post_transforms.Compose_": {"doc_hash": "cdf61675bd5697e77dff954142ce1f2878575ad1e2a11d5a6da1f7a3fb51f9d4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_run_training_test._TestTrainIterEvents_run_training_test._TestTrainIterEvents._model_completed.pass": {"doc_hash": "67b92c7be4dd4d66043625f323daa9accfe69b988edfb309def519cf16168dfa"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_run_training_test.train_handlers_run_training_test.return.evaluator_state_best_metr": {"doc_hash": "a9bdc12a09ae32912e6441f3fd9734da4cd6cc64e5a3bbb7303664d718a3ea69"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_IntegrationWorkflows.train_and_infer_IntegrationWorkflows.train_and_infer.return.results": {"doc_hash": "2c807096fb8a474e8b6455932ddb0badf9e4cf4df5fd0086aef57fc692c1e156"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows_gan.py_logging_from_tests_utils_import_D": {"doc_hash": "da4c310e15728c7aaa288e01c78602a0edf63838388e0139f37bd59845f18c99"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse.py_random__non_sensical_tests_cro": {"doc_hash": "e39ff44e55516909d119d7d2dd5facfad9fdee4498b17ff6d0878e517d48a4df"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse.py_for_t_in__None_11": {"doc_hash": "9f7f8bad2be2fa29ced540084fffeafeaac9d9c72e82ae84ff11374017a33cd4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse.py_None_12_None_28": {"doc_hash": "69287b6940dc0482616d06f12e9ce0e4b3f11b05c19d995e019edb0733098460"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse.py_None_29_no_collation.return.x": {"doc_hash": "a3afcbef418fd1edcacbdc06c184418423a309fa6eb74bcfe4deaeb4730c94a0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse.py_TestInverse_TestInverse._Test_inverse_methods_": {"doc_hash": "27ccd996e30251b1f6c204d14aaf11680a12cdd450b0e516a4594b8b1e5d45d9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse.py_TestInverse.setUp_TestInverse.setUp.self_all_data_3D_loa": {"doc_hash": "cfb4316945bf9377ea5c594e3bb5f36e0e412816fa80cf23c46ea8667b42236b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse.py_TestInverse.tearDown_TestInverse.check_inverse.for_key_in_keys_.if_isinstance_orig_np_nd.try_.except_AssertionError_.raise": {"doc_hash": "ebb78a971196ba25f20281752a860f96781e94245b463f2d1fa63445e8a0421a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse.py_TestInverse.test_inverse_TestInverse.test_inverse.for_i_t_in_enumerate_rev.if_isinstance_t_Invertib.self_check_inverse_name_": {"doc_hash": "e4c7d1cc1bd9f0fa22043fa58081f43bbb27f69192c44a3546d6dc29a1589caa"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse.py_TestInverse._skip_this_test_if_multi_TestInverse.test_fail.with_self_assertRaises_Ru.t2_inverse_data_": {"doc_hash": "d63ddabea256bdb371b246208c4703a71cce1b4324f996b4e91b0e69000d9020"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse.py_TestInverse.test_inverse_inferred_seg_": {"doc_hash": "91742dfc64fa420a4031b79bb89ba691c9da20e7f9b8832e662b5d7bc44c86a9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse_collation.py_sys_TESTS_2D._": {"doc_hash": "893305e27120a96f461972a5bd1b7a2247339390424acdf10df33a4862a29eae"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse_collation.py_TestInverseCollation_TestInverseCollation.setUp.self.batch_size.7": {"doc_hash": "e888a0f81e71d1cd5d5223748bf0cdd83c15e68b6add2ed0b2c989ff18ea5a9b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_inverse_collation.py_TestInverseCollation.tearDown_": {"doc_hash": "835f4a9b8540f6bdec464ec6b99a9d6cc6acf0be68586a39cf3bf0576d4312bc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_iterable_dataset.py_os_": {"doc_hash": "2a7730d17d86ff17cf51973fe5c5509ceead953158783ddbfa5e43b9f497ddb1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lesion_froc.py_os_WIDTH.800": {"doc_hash": "d40bf2fee4b2dc13f0234b45c86cac56636653ae0934c3aba4c9e4d45d945f3f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lesion_froc.py_prepare_test_data_prepare_test_data.None_10": {"doc_hash": "8fd6f368aedd7adb18d04a42de4c30c3e16721aea7db27a265ceb80e63b1d32e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lesion_froc.py_TEST_CASE_0_TEST_CASE_6._": {"doc_hash": "c75caf7e5311f0d5c4ebc4df11d1cd879330076d184354f7bc4e8b838c3c701c"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lesion_froc.py_TEST_CASE_7_TEST_CASE_9._": {"doc_hash": "8f711baeed491098a49c278ca727fbc328bc95783dbb323c1fdb198a2267783e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lesion_froc.py_TestEvaluateTumorFROC_": {"doc_hash": "47e40465769a0ce603d3370d1ec788ea146e45de5ad543eec93d1b41b8d46baf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lmdbdataset.py_os_TEST_CASE_7._": {"doc_hash": "237aabae91d71b19aff255c89534767302e7a9d04917dd89a213297fb153f217"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lmdbdataset.py_TestLMDBDataset_TestLMDBDataset.test_cache.self_assertTrue_isinstanc": {"doc_hash": "92077734408aa9c32680f698a887324caa8a252896b8d490127f445d5a730bc2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lmdbdataset.py_TestLMDBDataset.test_shape_TestLMDBDataset.test_shape.with_tempfile_TemporaryDi.data2_postcached.dataset_postcached_1_": {"doc_hash": "81dd8fbdd55cdf4302191b8266d9bbfd58fb49fa12d79ff823430092e97c9069"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lmdbdataset.py_TestLMDBDataset.test_shape.if_transform_is_None__": {"doc_hash": "536a867f76f5c658209c068ef4e742c5aff9f466bf07b984dcad758b758ed55b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_decathlon_datalist.py_TestLoadDecathlonDatalist.test_seg_no_labels_TestLoadDecathlonDatalist.test_seg_no_labels.with_tempfile_TemporaryDi.self_assertEqual_result_0": {"doc_hash": "0b714702338233dccb5a782ffa03c479301f48abf711a19bca0bf3818d0aaa22"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_decathlon_datalist.py_TestLoadDecathlonDatalist.test_additional_items_": {"doc_hash": "fe5d9e512e502134ce5a448ed68739fd89f411086a2b9d79a03ef9a189400fa4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_os_TEST_CASE_11._": {"doc_hash": "8ec013bfa99a14a8793280282fdd85f96e5ebc2ef679c22ceb3603cde504c2ea"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_itk_reader_multichannel_TestLoadImage.test_itk_reader_multichannel.with_tempfile_TemporaryDi.None_4": {"doc_hash": "4bb50f5bfe3d913e9a30a06e17931476873e27856dd73ade88e53741a329a1ea"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_load_png_TestLoadImage.test_load_png.with_tempfile_TemporaryDi.np_testing_assert_allclos": {"doc_hash": "c8d4f0a7cfd1ca8b1866364645835b9db1216e37d59b36aedd12d791da77116f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_imaged.py_os_TEST_CASE_2._keys_KEYS_reader_": {"doc_hash": "25c793abebec6bf41a69da2a4bd9a85d92ca08276985361e76b9c659e6d142d3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_imaged.py_TestLoadImaged_TestLoadImaged.test_shape.for_key_in_KEYS_.self_assertTupleEqual_res": {"doc_hash": "58abc7f066c382909ed8c18e1aa2abf0daac0d082a509394533a26753faf4904"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_local_normalized_cross_correlation_loss.py_unittest_TEST_CASES._": {"doc_hash": "e32b707dadd736b2e0f4a95169679fcfaa98d5a02864132ba7c47b7ee1aa21dc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_local_normalized_cross_correlation_loss.py_TestLocalNormalizedCrossCorrelationLoss_TestLocalNormalizedCrossCorrelationLoss.test_ill_shape.None_1.loss_forward_": {"doc_hash": "9a62d106ceae051881053bd1bda50edc3b804138f5ba2bed1afa337458021c8b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_local_normalized_cross_correlation_loss.py_TestLocalNormalizedCrossCorrelationLoss.test_ill_opts_": {"doc_hash": "e93ec8d796c599c9a9c2e2ccce76d20a85b580ea8f9ce16ba1e2a773df84acab"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_localnet.py_unittest_TEST_CASE_LOCALNET_3D._": {"doc_hash": "e454b3f70076169d7d4f2c0c4496716ab72483f5a34da7a8a59802b2e5488333"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_localnet.py_TestLocalNet_": {"doc_hash": "d7f6f8e2fbe5adfbaee99c822bb2a91f23a4db20b9e1bb910551e5cef92beb95"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_localnet_block.py_unittest_in_size.4": {"doc_hash": "1c29e4d7536462f5d3cc316a6f134f119ca9e11cbb37e872069047c6a191786f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_localnet_block.py_TestLocalNetDownSampleBlock_TestLocalNetDownSampleBlock.test_shape.with_eval_mode_net_.self_assertEqual_mid_shap": {"doc_hash": "07db2b298135e70524ee98abf2089d8bfb2ee8b3d91de660c8221e9c40e05e40"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_localnet_block.py_TestLocalNetDownSampleBlock.test_ill_arg_TestLocalNetDownSampleBlock.test_ill_shape.with_self_assertRaises_Va.with_eval_mode_net_.net_torch_randn_input_sha": {"doc_hash": "a1f8c5a9f8de111055c5a01a3e7530eb766c2fa93d126e25d74fa0f79e1998ee"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_localnet_block.py_TestLocalNetUpSampleBlock_TestLocalNetUpSampleBlock.test_shape.with_eval_mode_net_.self_assertEqual_result_s": {"doc_hash": "6c05d0f0b7dc547069a1d3b27dfb46874e6ec21df7114fd8a2d2049824cbb4bf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_localnet_block.py_TestLocalNetUpSampleBlock.test_ill_arg_TestLocalNetUpSampleBlock.test_ill_shape.with_self_assertRaises_Va.with_eval_mode_net_.net_torch_randn_input_sha": {"doc_hash": "c883f5c9907e5b434ec67f068ebd3f189d5d8927b3ce57c0926b30c0e77bfe7c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_localnet_block.py_TestExtractBlock_": {"doc_hash": "98b6606ac4ae28fb9fe1f73e748078742acf7bd931d0521f2d2462a1c9b016ef"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lr_finder.py_os_device._cuda_if_torch_cuda_is_a": {"doc_hash": "959dc9325887cb53668a9cb7aa630012899e2af36e7bc72c674ec945df70c2de"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lr_finder.py_TestLRFinder_": {"doc_hash": 
"82eb899ac9f075f82baa63f48dca6ef006a8b37ea4ef1d8fd663eeb612661b7a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_map_label_value.py_unittest_TEST_CASE_8._": {"doc_hash": "4b86f5904e7783209b243406e7c72d5dd32f527d3e6041d57375b369ae23cfe5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_map_label_value.py_TestMapLabelValue_": {"doc_hash": "0e9019b6d4987386b2c431c4c4fd6d5b926487b2658f19ad4348a6a740906d1d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_map_label_valued.py_unittest_TEST_CASE_7._": {"doc_hash": "7061ab4386567796a9b3afbbd57c8638b12d95decd8e6189cdcd5cfb734b5dac"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_map_label_valued.py_TestMapLabelValued_": {"doc_hash": "b2fe6dc48affbbcbd4adc2ff1c71473a9e0fb8fb44faa070f6d13fe971c59640"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mask_intensityd.py_unittest_TEST_CASE_3._": {"doc_hash": "e83ff25801c5412a3f55a9a321f64368647769a5879368778290b6ecfa2b5640"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mask_intensityd.py_TEST_CASE_4_": {"doc_hash": "f5c58d7e9e51eee3c47320853faba4167bb31f4216f87142836349fba4a24717"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_inference_wsi_dataset.py_os_TEST_CASE_1._": {"doc_hash": "648c4cbc519bf971eaa97941f345c4d94c79c30c2556dc66c61dc7da1a2d77a5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_inference_wsi_dataset.py_TEST_CASE_2_TEST_CASE_OPENSLIDE_0._": {"doc_hash": "16ebfe2f233621b51f4e0167de3f103a3da531aba1e02bed7318b5f018644e8a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_inference_wsi_dataset.py_TEST_CASE_OPENSLIDE_1_TEST_CASE_OPENSLIDE_1._": {"doc_hash": "823ff5080f041f1486e40d307489a6821d5331c9815d877555612f0453c2b593"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_inference_wsi_dataset.py_TestMaskedInferenceWSIDataset_": {"doc_hash": "75c4267f9b79c6b57ca4883853b93fcaeeeb1a1a3ede4ba93cd184754d5fe0cf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_module_list.py_glob_": {"doc_hash": "6dc637d4a4a4951fa71232c031e480909c08a4333399adf62b5409832ead5c58"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_multi_scale.py_unittest_TEST_CASES._": {"doc_hash": "9ebbb5a5745d457119113a83a2c78f870d7d8ae1b41e45c9f111edd934a1e70f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_multi_scale.py_TestMultiScale_": {"doc_hash": "c9378de5c41c73d12ec94cc761759490877a7d820ea822467755995e2970524b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_endianness.py_os_for_endianness_in_.for_use_array_in_True_F.for_image_only_in_True_.TESTS_append_endianness_": {"doc_hash": "1b020624ca06cf945b63d8ee94049c5e43750cbb5edb684a341c8e28ae70d8d6"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_endianness.py_TestNiftiEndianness_TestNiftiEndianness.test_endianness._.next_iter_check_loader_": {"doc_hash": "6129fd008dcaa1adffca74bb3b381176a99fe264c31d06b3a1ac63a6a000e4aa"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_endianness.py_TestNiftiEndianness.test_switch_": {"doc_hash": "9f5702994c680ed1932d4a59bcff3b16bf269cbb7186f0020a04944ad2d0c0b6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_saver.py_TestNiftiSaver.test_saved_3d_resize_content_TestNiftiSaver.test_saved_3d_resize_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e": {"doc_hash": "e61b22ea37a838c256c63a69c579aa642c6c2f665aa7afa072471aada4e4cd09"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_saver.py_TestNiftiSaver.test_squeeze_end_dims_": {"doc_hash": "888e093c56333b43484ff77c052b16d33da45a85640d1434dda689fc65166fa3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_npzdictitemdataset.py_tempfile_TestNPZDictItemDataset.test_load_stream.None_2": {"doc_hash": "f468b4f8664ddad8769cabecf813418c2d9af2aa39c58bd6248d34c07460afb3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_npzdictitemdataset.py_TestNPZDictItemDataset.test_load_file_": {"doc_hash": "34f7627e9eeb72aa34e26206fa99b8416752bb1ba2ec25b322e5b542e1e23f3e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_occlusion_sensitivity.py_unittest_TEST_CASE_FAIL_1._2D_should_fail_sinc": {"doc_hash": "b60fa9326815580d6a9382bbfc342fc8a1d66513f583319dfb448bd94d70853a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_occlusion_sensitivity.py_TestComputeOcclusionSensitivity_": {"doc_hash": "d91c650a5bafe2d4d90d0729c0b20d71bfadcf2d0a53f0516d533ab7fe0b1c33"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_openslide_reader.py_os_TEST_CASE_4._": {"doc_hash": "394baab6dc7a8736a6cbb18d9ea15e4b1808d699ed443c3cf3289cb1d3bcdc9a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_openslide_reader.py_TestOpenSlideReader_": {"doc_hash": "f83f9cc00460bd4d2a179c8f92d452f1910e820a2c61b2ac524fe38087b32a36"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pad_collation.py_random__Dataset.__getitem__.return.self_transforms_self_imag": {"doc_hash": "0e8d13f63882ff57e5abe4a79a9adc13052493ab9c2a8b1fcce048ea1ff67a53"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pad_collation.py_TestPadCollation_TestPadCollation.tearDown.set_determinism_None_": {"doc_hash": "3a50d8ba848d74879035d81e1ed475a5259d1a6a69d5406a9f40e4044fb8a6dc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pad_collation.py_TestPadCollation.test_pad_collation_": {"doc_hash": "e237e49e39b0d014ab9122ed6a0dfd234a555e9cf3d3a60cf6642e1011245f88"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_partition_dataset.py_unittest_TEST_CASE_8._": {"doc_hash": 
"76bebcaf3efb1f91086f7f5faf60f4007af5682c2d19905245bb5e4507c8a41b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_partition_dataset.py_TestPartitionDataset_": {"doc_hash": "de06d2fd26edafa2f19ce6d1ebe736d28a780968c2816f96464b0b01d170e4bf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_partition_dataset_classes.py_unittest_TEST_CASE_3._": {"doc_hash": "6b1e6ad30a4c05bdeb1b00798b79d1aaa954774504eda9f5ea4fe7685e7dda99"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_partition_dataset_classes.py_TEST_CASE_4_": {"doc_hash": "ba90b77a1e638f4fa2073e9581115f67c886db8df7127aeb6c350bc38cac307e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_patch_wsi_dataset.py_os_TEST_CASE_3._": {"doc_hash": "5e0d154349eccb6df1c501ddbab2a347d186ea0345ac6c76673900f86c3f8c02"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_patch_wsi_dataset.py_TEST_CASE_OPENSLIDE_0_TEST_CASE_OPENSLIDE_1._": {"doc_hash": "403bb80b8f81ef7cd2a86cbdef52a0906a714a911d64a6a2d17eadf09bcd6325"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_patch_wsi_dataset.py_TestPatchWSIDataset_TestPatchWSIDataset.test_read_patches_cucim.for_i_in_range_len_sample.None_3": {"doc_hash": "41804d562ac2fb1358d8f82bd05f4ae42b8320aa19b9f764718f78397777fe45"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_patch_wsi_dataset.py_TestPatchWSIDataset.test_read_patches_openslide_": {"doc_hash": "1f11466fb29afa34ad3816aa8cd8362cc1a7b920a14b82020b9a66c2cc4485a7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pathology_prob_nms.py_unittest_": {"doc_hash": "09707ddd886b8ed0cb29ebf368658d5d42c3aa6ca6ad4b433665f55087ccbf7f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_persistentdataset.py_TestDataset_TestDataset.test_cache.with_tempfile_TemporaryDi.None_5": {"doc_hash": "faa842e48c9b0cd36a0791bac130144e81b2ff545a41ee6fc9df3e1531ca3462"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_persistentdataset.py_TestDataset.test_shape_": {"doc_hash": "aab209f2e936b4f8a5a35150e8430f12bcb9d1f7a1d01ea94b8141923dc7c373"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_phl_cpu.py_unittest_TEST_CASES": {"doc_hash": "369a3b1b76844af58d41d725860f2a4b724fdd105c4d0215f0f45c613683b2ea"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_phl_cpu.py_PHLFilterTestCaseCpu_": {"doc_hash": "52467a8ab5acc2f5fbf3566ec6e46367d58fbca328351935d6f5c391579ca262"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_phl_cuda.py_unittest_TEST_CASES._": {"doc_hash": "3661d065d1df0f101f598c9d2c29e80f804ceebd25653df8e8579a8d54ae5fac"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_phl_cuda.py_PHLFilterTestCaseCuda_": {"doc_hash": "e0ec7426f421210b6a0ec75943ccc4de6944df1d36ee39c40ba2a1657720357e"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_png_saver.py_TestPNGSaver.test_saved_content_spatial_size_TestPNGSaver.test_saved_content_spatial_size.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e": {"doc_hash": "f73a35d03c6978863407cd911d934b38fcd7633658be1392ee6f02e2325ae96e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_png_saver.py_TestPNGSaver.test_saved_specified_root_": {"doc_hash": "8f09d20c559be663120a8f064e2afa32cb2310828f63e5087a8a028c6d0ac643"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_print_info.py_unittest_": {"doc_hash": "752f0a7590e0aac73dddca14268a8f2454a49713487de5eaea202caab09ad417"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_probnms.py_unittest_expected_7._0_9_66_66_0_7_33_": {"doc_hash": "c7ebaeed5e72283eca84f2f211d6a75d3f355d37af4fc0da7aa713ee92fa3a64"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_probnms.py_TEST_CASES_2D_7_TEST_CASES_3D._": {"doc_hash": "89dc38652f7820219eb715f443bdeaad57bf463e35f89f264b59940ad92c3baf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_probnms.py_TestProbNMS_": {"doc_hash": "5426fccce7ccac2ae343f66cda0a9b07cb9f21cd811f12e5a28b52615ffc635f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_probnmsd.py_unittest_if_torch_cuda_is_availabl.probs_map_7.probs_map_7_cuda_": {"doc_hash": "dc03d6423a60eaf2b3f11f0f14fc1c2d5241d47fa02ac098fa9a640d91a6ba48"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_probnmsd.py_expected_7_TEST_CASES_3D._": {"doc_hash": "bf1a19a1dfe1bc52b1189f8ecd00ccbea9f232040524d05a27644357895ce140"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_probnmsd.py_TestProbNMS_": {"doc_hash": "86ceedfc35418dfd4b2b09570d75db6f6a68c970dd01ae1b67b297d11d017795"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_axis_flip.py_unittest_": {"doc_hash": "e341c6278ce29b60b2b67cb49a5d63d314d839607118c9e6bd2b064fec3fe97d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_axis_flipd.py_unittest_": {"doc_hash": "98ad43f980d7776c3e7db8ebf39a84ca151c34f1cb15c2bd12d9c560e97c45f2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_noise.py_unittest_TestRandGaussianNoise.test_correct_results.np_testing_assert_allclos": {"doc_hash": "362525adc0ba3ccd38db8340565a92e6c2d365fdd0bb7544b70d6964c407ebf4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_noise.py_TestRandGaussianNoiseTorch_": {"doc_hash": "3c362e6b79859dc5e85014f180b3f5f42e168e1a7b1fbde7759e172e237d96b3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_noised.py_unittest_seed.0": {"doc_hash": "5b350ff1bf71a14e298f34d8ade80c223b72b74d5765a44704aaacbe0e772532"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_noised.py_test_numpy_or_torch_test_numpy_or_torch.for_k_in_keys_.np_testing_assert_allclos": {"doc_hash": "4186eb2d9c8d87cd64673642143103ae0702b119bb813a173d0d6ac15e12bb63"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_noised.py__Test_with_numpy_": {"doc_hash": "80286336b0280df2bdf334d76d4c1f8ea60d0141f578779ce067e378a9dee8d2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_lambdad.py_unittest_": {"doc_hash": "c56b3a5f8bc7361ae71518db11143a34255e69ecad2f82d9b755518c03a13317"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate.py_unittest_TestRandRotate2D.test_correct_results.self_assertLessEqual_np_a": {"doc_hash": "4792cdcb4599594a08907e8bb042409e1edb4fe0a439d43e87e9a2a8e2d51a36"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotated.py_unittest_TestRandRotated2D.test_correct_results.self_assertLessEqual_np_a": {"doc_hash": "729278a6f3f8b16dfc183b513a94bb493cc0a8ab69bb1a236935a12be83a1295"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_std_shift_intensity.py_unittest_": {"doc_hash": "42f7f91d3af1a61f2daad36c6bbb0159f2660d97c89eaf9eb7b830503333120e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_std_shift_intensityd.py_unittest_": {"doc_hash": "19eba7a7218d09f6252d22beb0c7e174f0b70291844520909c72cb51ffaa83f2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_crop.py_unittest_TestRandWeightedCrop2D.test_rand_weighted_crop_small_roi.None_3": {"doc_hash": "574be95edc048c5ef501ffcaa0ad5c44f589b317ff59854a9080abae1c9dd44b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_crop.py_TestRandWeightedCrop2D.test_rand_weighted_crop_default_roi_TestRandWeightedCrop2D.test_rand_weighted_crop_default_roi.None_3": {"doc_hash": "fb99ccb0d3847344bbeac7016438a1e7fd2666ece28cdcc1e8ac64ffbd626188"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_crop.py_TestRandWeightedCrop2D.test_rand_weighted_crop_large_roi_TestRandWeightedCrop2D.test_rand_weighted_crop_large_roi.for_res_in_result_.np_testing_assert_allclos": {"doc_hash": "85e0a947fc3c215e29fb9a2dfab8f3ade7c282c3317456464ccc5751d80dbfea"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_crop.py_TestRandWeightedCrop2D.test_rand_weighted_crop_bad_w_TestRandWeightedCrop2D.test_rand_weighted_crop_bad_w.None_3": {"doc_hash": "850d5b426e3dedf6e98803bc7da133717ef09f3e0b9880439b646730e71802d4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_crop.py_TestRandWeightedCrop_TestRandWeightedCrop.test_rand_weighted_crop_small_roi.None_3": {"doc_hash": "927d9d5acb686f830df5509f6178bc18a36427c1da04c76c28652e3a9c8c2e88"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_crop.py_TestRandWeightedCrop.test_rand_weighted_crop_default_roi_TestRandWeightedCrop.test_rand_weighted_crop_default_roi.None_3": {"doc_hash": "441bb6baffb7527360161d7d96a6afdb1b05cffd718eba4b08a27827e7393031"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_crop.py_TestRandWeightedCrop.test_rand_weighted_crop_large_roi_TestRandWeightedCrop.test_rand_weighted_crop_large_roi.for_res_in_result_.np_testing_assert_allclos": {"doc_hash": "07873cc014a1584023e0448ca80d1613575bd5eabcc7b82ee735f9b09c5e5dc3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_crop.py_TestRandWeightedCrop.test_rand_weighted_crop_bad_w_": {"doc_hash": "6d5c26ca344a163e8f1d2d25ec74a1c55a11c627abb1b401eaab795d039abe1e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_cropd.py_unittest_TestRandWeightedCrop.test_rand_weighted_crop_small_roi.None_3": {"doc_hash": "4aec65e1aa324f63a81ac84fc33bdb9e3ef1ec88535b05a34be12605b4ee9d30"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_cropd.py_TestRandWeightedCrop.test_rand_weighted_crop_default_roi_TestRandWeightedCrop.test_rand_weighted_crop_default_roi.None_4": {"doc_hash": "9a715fb4501d2efe54f94c963db931ec566cb22025a4f96be1b4d3238cc1e14b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_cropd.py_TestRandWeightedCrop.test_rand_weighted_crop_large_roi_TestRandWeightedCrop.test_rand_weighted_crop_large_roi.None_5": {"doc_hash": "05687061b1456b95e8baca6efda6e29435a1f8d8557c2cc32e7943c55e5c9dbc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_cropd.py_TestRandWeightedCrop.test_rand_weighted_crop_bad_w_TestRandWeightedCrop.test_rand_weighted_crop_bad_w.None_4": {"doc_hash": "5cf875c6e582d5bfbd093cb6f7928c62348f0c31f974f749a007ce50481694a7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_cropd.py_TestRandWeightedCrop3D_TestRandWeightedCrop3D.test_rand_weighted_crop_small_roi.None_3": {"doc_hash": "41980900aa0b230110f5e678a3cd79f214be77f04a25a9299e5105754eac2f00"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_cropd.py_TestRandWeightedCrop3D.test_rand_weighted_crop_default_roi_TestRandWeightedCrop3D.test_rand_weighted_crop_default_roi.None_4": {"doc_hash": "24128f181db46da331dfec30fb2b6a97b2ef9562586e34c63494c089cf1cf511"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_cropd.py_TestRandWeightedCrop3D.test_rand_weighted_crop_large_roi_TestRandWeightedCrop3D.test_rand_weighted_crop_large_roi.None_3": {"doc_hash": "6efc5035d9f1c977ce438937379df09cdb2e4ea55f46340068eaddc8dc55d190"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_weighted_cropd.py_TestRandWeightedCrop3D.test_rand_weighted_crop_bad_w_": {"doc_hash": "99de9075e24cdfc51bcbb43a58de60391f41f1ca51ac9377c4d75ad7dd96fa0a"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoom.py_TestRandZoom.test_invalid_inputs_TestRandZoom.test_invalid_inputs.with_self_assertRaises_ra.random_zoom_self_imt_0_": {"doc_hash": "9c037ecb3a06c5a5423a172967d4022ad3866148702fc1674f1221da2b86c588"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoom.py_TestRandZoom.test_auto_expand_3d_": {"doc_hash": "c76b4a5a6dd5c22c4767083bf2ea2c28647966dc85e1ab2bb324814c2d92c5b5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoomd.py_TestRandZoomd.test_keep_size_TestRandZoomd.test_invalid_inputs.with_self_assertRaises_ra.random_zoom_key_self_im": {"doc_hash": "24476b42d3406b29c363bc8a3aa9bc081ea4fa6956de2d2ddabf09eb75194d68"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoomd.py_TestRandZoomd.test_auto_expand_3d_": {"doc_hash": "1f99b35a170bc97c741491cd4bd6732205d0e3ff96c316d9834fce6caad05188"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_random_bias_field.py_unittest_TEST_CASES_2D_ONES._coeff_range_1_0_1_": {"doc_hash": "154f32b8c2f0a5454a01439215a1b8963cbc7d61de7b6c2126d2219e4c0816ce"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_random_bias_field.py_TestRandBiasField_TestRandBiasField.test_output_shape.for_degree_in_1_2_3_.None_2": {"doc_hash": "0e1820636077e49c6a94db66f259d4980bc596e824febbcf2b45329d26fb2ad9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_random_bias_field.py_TestRandBiasField.test_zero_range_": {"doc_hash": "2f906ee941e0dff21fd2345d2b54d38cbbdd6df9a5011909c2a088ab800c5f5c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_random_bias_fieldd.py_unittest_TEST_CASES_2D_ONES._coeff_range_1_0_1_": {"doc_hash": "cf1fb76316fd02131204aa2c510da8da961f70a59213e2b9ba08c707b5247ccb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_random_bias_fieldd.py_TestRandBiasFieldd_TestRandBiasFieldd.test_output_shape.None_1": {"doc_hash": "3189c93a37618fa4d3f67cbb29a889a0153d3f7f8053bbfc22e57512ad8c427a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_random_bias_fieldd.py_TestRandBiasFieldd.test_zero_range_": {"doc_hash": "5e96f4bd71d0e116b2db2aa3aea2b55b4265abd945b4ce3047c9dba8eb533a9f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_randtorchvisiond.py_unittest_TEST_CASE_2._": {"doc_hash": "065c45b24e07e3d3b958010bb317199243481c2d5755f74423a874e7c1a15c9b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_randtorchvisiond.py_TEST_CASE_3_TEST_CASE_3._": {"doc_hash": "a9f5010e184282cd091ec29e28b8f0b95d33ea76fd515754ed71bb5953a5df67"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_randtorchvisiond.py_TestRandTorchVisiond_": {"doc_hash": "c537137a4b771d017afa8f094c07613c1d4d203e886c1ac429192d875b80470f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_reg_loss_integration.py_unittest_TEST_CASES._": {"doc_hash": 
"ea3241349ffd346d4cbbac975546d13866618e8200b2ea53447aaad97604279a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_reg_loss_integration.py_TestRegLossIntegration_": {"doc_hash": "b69bc39358ec55d1b96bb86d2f5822b6ca8cab7088711b895981ce868fa235c1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_regunet.py_unittest_TEST_CASE_REGUNET_3D._": {"doc_hash": "7efbc6d5cf56b39a4cfc0231a4f95d4db7bb4c35d588374373b36f77ac4b3488"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_regunet.py_TestREGUNET_": {"doc_hash": "6317f924a722aae974ab13fb630abb746403ebe111ade5e2c162c40ac6c29c6b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_regunet_block.py_unittest_TestRegistrationResidualConvBlock.test_shape.with_eval_mode_net_.self_assertEqual_x_shape_": {"doc_hash": "f8fe762e7f5c4f170fc3c4f247b613c69edbbfd7840aa03495424e171649142f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_regunet_block.py_TestRegistrationDownSampleBlock_": {"doc_hash": "79c2fe1fa14feb094a064420a88bb11fc71b1811b658453b33f6c1ae57e39260"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_remove_repeated_channel.py_unittest_": {"doc_hash": "1f9714e9b9d5fa4ceb243e94e727694879141cd781eace6c66a7355ca55d5f55"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_remove_repeated_channeld.py_unittest_": {"doc_hash": "eb8db3ee33edc7105b779e9a250e0497d278702a0f6d5a7604803c3c49dfd1e5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resize_with_pad_or_crop.py_unittest_": {"doc_hash": "7c277fa99a81871a265b42c25065c44394a6bdcf26f4ce1fb536320a445f4b10"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate.py_TestRotate2D_TestRotate2D.test_correct_results.self_assertLessEqual_np_a": {"doc_hash": "290dd7ef7a660d5d642bd87bfe1d297da34e55c3c5c245bfcf60b2e7fe2bf4fc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate.py_TestRotate3D_TestRotate3D.test_correct_results.self_assertLessEqual_expe": {"doc_hash": "b00e61771d957e7f7f0c52d50b90c27bb516523b3d5960ff1cdf84fe8c034ae6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_saliency_inferer.py_unittest_": {"doc_hash": "8de9d438f70b775db80e0297bbb0019d2b784d9a45faf69546acdfd49e320b75"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_save_image.py_os_TEST_CASE_6._": {"doc_hash": "67a7510036e38d538d0de3982ac075e78b6851fc3075c98a49c50b0ea174e771"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_save_image.py_TestSaveImage_": {"doc_hash": "56f9cd01b9caee7169ced746c8093fca5c598c8977a46381dfff945f32a554fd"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_save_imaged.py_os_TEST_CASE_6._": {"doc_hash": "4a20c960f0fbf5c11dfd4146b49c136fb0b527142722ad33c00a9c6020a596e7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_save_imaged.py_TestSaveImaged_": {"doc_hash": 
"3974ce2145a7ce267767009ce4e66cf2e65d7632a01cad246a5e7f2564823b7e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_savitzky_golay_filter.py_unittest_TEST_CASE_SINGLE_VALUE_REP._": {"doc_hash": "6aad2e8ff585594fbafa688c104b58bc4439fee6f146063c25607f4890c4f86f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_savitzky_golay_filter.py_TEST_CASE_1D_REP_TEST_CASE_SINE_SMOOTH._": {"doc_hash": "e06d3a27a28e5929aba1370d87466c812c830ee05e0904070988e3bf15638351"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_savitzky_golay_filter.py_TestSavitzkyGolayCPU_TestSavitzkyGolayCPU.test_value.np_testing_assert_allclos": {"doc_hash": "312260a191f583a2eb785b7dcc7fb8c9bd12add046fd08ea4669b48f322e3350"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_savitzky_golay_filter.py_TestSavitzkyGolayCPUREP_TestSavitzkyGolayCPUREP.test_value.np_testing_assert_allclos": {"doc_hash": "8031772b742a87f53aab2591679c9838269918146728c71e5cdc00f1be48d776"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_savitzky_golay_filter.py_TestSavitzkyGolayGPU_TestSavitzkyGolayGPU.test_value.np_testing_assert_allclos": {"doc_hash": "739f1e32fbbb6eacc66e285c0524b98e82e7352b61131c88934efa63cbea40d9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_savitzky_golay_filter.py_TestSavitzkyGolayGPUREP_": {"doc_hash": "58fbcb153bf8d5abffb0d5a806d9e9556aeffc983a45f7af35dab6e373e48517"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_savitzky_golay_smooth.py_unittest_": {"doc_hash": "ddb55d871ca2aa6c8ff39a3957dfba28a207d8afb4822195fe016dbd9365b431"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_se_blocks.py_unittest_for_type_1_in_relu_r.for_type_2_in_prelu_.TEST_CASES_3D_append_test": {"doc_hash": "d84a8fdb28de400c839b9d8318b7b138a696959692f5a9e8f3d67d0d3dfa5d1d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_se_blocks.py_TestChannelSELayer_TestChannelSELayer.test_ill_arg.with_self_assertRaises_Va.ChannelSELayer_spatial_di": {"doc_hash": "6206a83458b452327041a4f86ae2b1793fe03fcccab608d1f672821d12465a20"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_se_blocks.py_TestResidualSELayer_": {"doc_hash": "e66cc40a6fa1958edabc63d223b805cf3264e18d9d1cddd446f16f8ffb768841"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_segresnet.py_unittest_None_2.for_init_filters_in_8_1.for_out_channels_in_range.for_upsample_mode_in_Upsa.TEST_CASE_SEGRESNET_VAE_a": {"doc_hash": "eb083b2d578b4540b23c63ace5d9493476f0e8ee348939f6e7f78a4586998a53"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_segresnet.py_TestResNet_TestResNet.test_script.test_script_save_net_tes": {"doc_hash": "49caa1e519c86ce15578241b2d782395095b87975464d8d98a0ed89d6037b0ee"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_segresnet.py_TestResNetVAE_": {"doc_hash": "bf7f8276b87e029dbd01a7f44e87cdbb1d1538d82c3273512901c42cbdc60073"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_select_cross_validation_folds.py_unittest_TEST_CASE_2._": {"doc_hash": "b149b35e4b3b8330d64ed0a7cd1d323f6ffc72580069578e8e267d5f7f93e63f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_select_cross_validation_folds.py_TestSelectCrossValidationFolds_": {"doc_hash": "76d5f48a453862eaca2a1a33b3db2a17e2b2d4fabc06780834661d3628d8d12e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_select_itemsd.py_sys_": {"doc_hash": "34f463d4731a6f33073dcf850551a913d3e6c1ab3818c77bdabf9f9f2d13f4dc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_senet.py_unittest_TEST_CASE_PRETRAINED_1._SEResNet50_spatial_di": {"doc_hash": "f0abbfd691979a74dc9bebdfda5ad98a515787bc5c65494d8eb3fb6f6fddc8f3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_senet.py_TestSENET_TestSENET.test_script.test_script_save_net_inp": {"doc_hash": "0c1e26f00756fc0c47ae4205b89d831f9c55a52ba9030cf08bfcf9dd4f486283"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_skip_connection.py_unittest_for_type_1_in_cat_ad.TEST_CASES_3D_append_test": {"doc_hash": "fb038aa66aac0a3f520f92a4b575addfdba70a9801746858d91688aeaba24909"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_skip_connection.py_TestSkipConnection_": {"doc_hash": "1a064f9dbd9ec78f5bda6eb43b1675747813e997b796f8c8dcdb14438c79233f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference.test_sw_device_TestSlidingWindowInference.test_sw_device.np_testing_assert_allclos": {"doc_hash": "fc36ce1cc8c61e7f8ef117aa3576f4c352895744b0709787be6e0bafa70086bf"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference.test_cval_TestSlidingWindowInference.test_cval.None_1": {"doc_hash": "6877029f5f97ff8441c2152c9a1429411ccabdcbe63d564117e6555b387b2f7a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference.test_args_kwargs_": {"doc_hash": "088be45d5ad4ec06b28cb020492e13939587e231e6a215c92cfdb1f749fdd314"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_smartcache_patch_wsi_dataset.py_os_TEST_CASE_0._": {"doc_hash": "1b9598522f94a9cf55235190c3ef446977d2da3f8f01e6eb99c35313ba370945"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_smartcache_patch_wsi_dataset.py_TEST_CASE_1_TEST_CASE_1._": {"doc_hash": "694d12692c22d9a0af80b09648a555558401876294fbc90369475cfc378a914e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_smartcache_patch_wsi_dataset.py_TEST_CASE_2_TEST_CASE_2._": {"doc_hash": "812568d73d1e305e852d31e1d0371491ea2e95a163bd84f9577d1805120288fa"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_smartcache_patch_wsi_dataset.py_TestSmartCachePatchWSIDataset_": {"doc_hash": "fc568e3c48add339317b59ddd552f1956aa7fd73c9eb6e9d56d089345153775c"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_smartcachedataset.py_os_TEST_CASE_5._0_5_2_Compose_LoadIma": {"doc_hash": "db70bb441eaa13edbdebe65f5f4b049f30e800150e3d8897bc54f097da29872d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_smartcachedataset.py_TestSmartCacheDataset_TestSmartCacheDataset.test_shape.with_tempfile_TemporaryDi.for___in_range_2_.dataset_shutdown_": {"doc_hash": "238967922d5e188337fb99e3683e79c499a93a0a27868a0842d30c4ed64979d7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_smartcachedataset.py_TestSmartCacheDataset.test_shuffle_": {"doc_hash": "0d0215902a735826b0b359a5e5f2b9ad01ba7ea19725ad2a85f6fcfb664ddca9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spatial_crop.py_unittest_TEST_ERRORS._": {"doc_hash": "d03c226120ea47e4acd2d0497cc2976fd5f13d39a137189fa5e6d96c76fc6135"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spatial_crop.py_TestSpatialCrop_": {"doc_hash": "1d8f24588be9f16b49b04bb1275938f9761ce2f0ca665bb10fb81c4fcc83c164"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_state_cacher.py_unittest_TEST_CASES._TEST_CASE_0_TEST_CASE_1": {"doc_hash": "bb982edd6fbfa07c38eee1a4fc739c60909e04b69586d739ce6156360beb6781"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_state_cacher.py_TestStateCacher_": {"doc_hash": "762b6e2ecbc68da32b93cdae20037ed288404258705d3c7aa70450b986b58d55"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_std_shift_intensity.py_unittest_TestStdShiftIntensity.test_zerostd.for_nonzero_in_True_Fal.for_channel_wise_in_True.np_testing_assert_allclos": {"doc_hash": "04d225a00e794a245be530c1fb8c97a23cdd2f9e8377801e295a6517b6c969b0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_std_shift_intensity.py_TestStdShiftIntensity.test_nonzero_TestStdShiftIntensity.test_nonzero.np_testing_assert_allclos": {"doc_hash": "834740f6f378e92b206367519b792ccce2f6d6aaf7e6184e09c6fe08b0a6b657"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_std_shift_intensity.py_TestStdShiftIntensity.test_channel_wise_": {"doc_hash": "ef675d4d23cd785cac402dbdacc346cc77c97279ee1f3ff689891bada2aff8ea"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_std_shift_intensityd.py_unittest_TestStdShiftIntensityd.test_zerostd.for_nonzero_in_True_Fal.for_channel_wise_in_True.np_testing_assert_allclos": {"doc_hash": "3a4c6bc82070b1d79bf0ddf6aa75327ced42ecffa1d20cb25ee94338c4c3bf7e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_std_shift_intensityd.py_TestStdShiftIntensityd.test_nonzero_TestStdShiftIntensityd.test_nonzero.np_testing_assert_allclos": {"doc_hash": "a7738fb22774d599d0cd56ee38fea68fdd76ade91adad2a9aff136164a455a14"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_std_shift_intensityd.py_TestStdShiftIntensityd.test_channel_wise_": {"doc_hash": "561a412fa3bd9c220cd8f1c2d8f783f2583bca3fe61cbe471b46432d97e3e877"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_surface_distance.py_TEST_CASES_TEST_CASES_NANS._": {"doc_hash": "ec2f7eea5f8b29c9d4213f1e63f90b70290c7f53f4438d0b4421425b82a2e499"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_surface_distance.py_TestAllSurfaceMetrics_TestAllSurfaceMetrics.test_value.for_symmetric_in_True_F.ct_1": {"doc_hash": "74fb288ca1145eafe7c342cf585cb3efb0edb17f950418caaf603d01effe9e7c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_surface_distance.py_TestAllSurfaceMetrics.test_nans_": {"doc_hash": "bce52100169209f60802a0d5bef228e36704a2677de81db46d9afca1e2c5c200"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_testtimeaugmentation.py_unittest_trange.partial_tqdm_trange_desc": {"doc_hash": "2067b10bf7b73e03325fdcd117f87059d4a67830e94624f1c0cf8b592d83881e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_testtimeaugmentation.py_TestTestTimeAugmentation_TestTestTimeAugmentation.tearDown.set_determinism_None_": {"doc_hash": "1bdb5d261580ee31ebdbaf478fac074214adf05be7a6337b7486749b405c9f22"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_testtimeaugmentation.py_TestTestTimeAugmentation.test_test_time_augmentation_TestTestTimeAugmentation.test_test_time_augmentation.self_assertIsInstance_vvc": {"doc_hash": "520c28d56c684dcee7644c37558c7f4fd919c8f257675ca6ca5b0475355fc8b1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_testtimeaugmentation.py_TestTestTimeAugmentation.test_fail_non_random_": {"doc_hash": "82724b52d9b7a23cc0423db2b0745f846eda1dc98b802f661f06b5d83a115e9a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_thread_buffer.py_sys_TestDataLoader.test_values.for_d_in_tbuffer_.None_3": {"doc_hash": "cde67ff2975a408896e4eeb512148a6e160672d86b143b6044dbf507b1fef283"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_thread_buffer.py_TestDataLoader.test_dataloader_TestDataLoader.test_dataloader.for_d_in_dataloader_.None_3": {"doc_hash": "9cb8534774d52e96fc2a15c8caf931c34ae9a792651287937c8e5cea5ded99d9"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_thread_buffer.py_TestDataLoader.test_time_": {"doc_hash": "09a961bac5fa0e0d840ed97ae4a3e5ac5a263bedb7e9b58095f93119cfbe45b3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_threadcontainer.py_os_compare_images___optio": {"doc_hash": "c67d8f9f5980354daffc52b45b9a0514638684c62c33f69d8bb4cb3c8842d74b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_threadcontainer.py_TestThreadContainer_TestThreadContainer.test_container.con_join_": {"doc_hash": "20b8a45506f11de2031de2fd395129df3ee81b1740d5b0bbb31f1a78a8784f36"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_threadcontainer.py_TestThreadContainer.test_plot_": {"doc_hash": "0bad2ae25455620605c6a535c7beab42faea11133f38083f1e30e5e1b62186bc"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_timedcall.py_multiprocessing_case_1_seconds_bad.assert_0_1_wrong_cas": {"doc_hash": "0f538ab8db6c79ef9e1f7bcef72bd3849c00af458f68cab4355c49c7b0934988"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_timedcall.py_TestTimedCall_": {"doc_hash": "d5948cba40592fce94e4a7a8dcaaa1870918cf716f419355c34889a5f31e5624"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_pil.py_unittest_TEST_CASE_TENSOR_1._torch_tensor_1_0_2_0_": {"doc_hash": "0dfbc00503c692c47202369be21f7d50a7d0c596db9e7b462443f3f313d24c20"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_pil.py_TestToPIL_": {"doc_hash": "d9c4c52f699675a4ffe0d1e8f9896895c758e95d20df6dd69499b7877bbf6b57"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_pild.py_unittest_TEST_CASE__TENSOR_1._keys_image_ima": {"doc_hash": "3ad4da7cf2c69efc4e31662215438c9e6b4405b0038fa7dee14da9a6ae06bc3d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_pild.py_TestToPIL_TestToPIL.test_tensor_input.np_testing_assert_allclos": {"doc_hash": "ad28716ec5b80b4a0a3a22936651036f33d2daab5d52ccae1097f71cb79f9492"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_pild.py_TestToPIL.test_pil_input_": {"doc_hash": "2780437bac8e9802fc4dc74a27ca58f88857bab33faccef4fc2225ff7a0fc2ed"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_torchvision.py_unittest_TEST_CASE_2._": {"doc_hash": "dc4bc7d495478852e6edde5baa53c43459989522eca9720d7b0a10d4659e7440"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_torchvision.py_TEST_CASE_3_": {"doc_hash": "6d7f9b012009f82c363ac7a1ccca3534f7b2b25530e629eaeb2b2010669a5aba"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_torchvision_fc_model.py_unittest_TEST_CASE_PRETRAINED_2._": {"doc_hash": "a11ebf17bc1bc01ce9d7e3723a284e62ac086ee9c102618bc83f87606e85cbe5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_torchvision_fc_model.py_TestTorchVisionFullyConvModel_TestTorchVisionFullyConvModel.test_without_pretrained.with_eval_mode_net_.self_assertEqual_result_s": {"doc_hash": "55fbd9ce214316de1b004de50ee63f942f3eeb868f9b396efbfdaed03899bad7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_torchvision_fc_model.py_TestTorchVisionFullyConvModel.test_with_pretrained_": {"doc_hash": "2ef5b51a475ad4846948717cb346dc66a7a5cf752e9313595304ba21102a97b6"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_torchvisiond.py_unittest_TEST_CASE_2._": {"doc_hash": "bdc63f7250f4f366422b581daa533a098e30c46fce71b5f4ab7367931fa2c67c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_torchvisiond.py_TEST_CASE_3_": {"doc_hash": "5deb853e83f18e5a71996b6d1479f8d8727a32301d3ef1fad96c40e3f27f3d4e"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_train_mode.py_unittest_": {"doc_hash": 
"3f10953c62c270fb2b45ec568fecb5479a817272f5f094882476a775ec246122"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_unet.py_unittest_TEST_CASE_4._4_channel_3D_batch_": {"doc_hash": "bad68263db80900e36f9c9c0cd14529b9993325a7ebf794d3ee16dddb8de152a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_unet.py_TEST_CASE_5_CASES._TEST_CASE_0_TEST_CASE_1": {"doc_hash": "0b473b1a72bb68521ccb4492f82d6270279890718598445b13343ad44225e98a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_unet.py_TestUNET_": {"doc_hash": "367149dcd504976a9c2de42d190d3a5a8edfd03ffad23f450b497d28dccadad5"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_varautoencoder.py_unittest_CASES._TEST_CASE_0_TEST_CASE_1": {"doc_hash": "3f03437038ceb215e7317b0fc5ca9d6e4f7e93c0a7b76ccd511960fa89c04053"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_varautoencoder.py_TestVarAutoEncoder_": {"doc_hash": "f6be8516f699725c0e385422dabac763d2afa39ba498f77e1db17d83c74560d2"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vis_cam.py_unittest_TEST_CASE_3._": {"doc_hash": "c8642f36015efef5e5160ab7e746b1520d7d246cb641383240ea4991778da21c"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vis_cam.py_TestClassActivationMap_": {"doc_hash": "067165446db50106b9a08a85d8729e583ca0ac35fe77e5388e7799c7660c3e7b"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vis_gradcam.py_unittest_TEST_CASE_3._": {"doc_hash": "0b6c6b9eb7027aba242e3bd074b903726634d4270e3e34a5ced36e23ce7e5ea1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vis_gradcam.py_TestGradientClassActivationMap_": {"doc_hash": "e2ad106058ef55da2e30e2a34cc5e276986a389e77fdde693aa44896ee833673"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vis_gradcampp.py_unittest_TEST_CASE_3._": {"doc_hash": "c4e4243fff9c48d6020a0b535d207054e702d70455a216dd56a7dc97010139c4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vis_gradcampp.py_TestGradientClassActivationMapPP_": {"doc_hash": "8dbb22693511f989f3c4f5d42ed9c75a62096f685a336609b603c1876208e6e4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_warp.py_unittest_LOW_POWER_TEST_CASES._run_with_BUILD_MONAI": {"doc_hash": "1e2664b24b7708d5ca737cbd12ae8dde70bbdcd4e9eafa16758e4360a31bfe03"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_warp.py_CPP_TEST_CASES_if_USE_COMPILED_.TEST_CASES_CPP_TEST_CA": {"doc_hash": "dbc7f149329cfd6552f0fa71669fa8888c8e1984cfc45ed11aa47279e0fbc0e4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_warp.py_TestWarp_TestWarp.test_ill_shape.None_2.warp_layer_image_torch_ar": {"doc_hash": "6e156427697cf44527a49368222f8a1355190cdb595795f5788aa1fb79460883"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_warp.py_TestWarp.test_grad_": {"doc_hash": "d59c983cfc4db6a2718d012ea6e0e210d8bc8e6b7c7b28b79c87e12422e7aeae"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_with_allow_missing_keys.py_unittest_TestWithAllowMissingKeysMode.test_map_transform.for_amk_in_True_False_.if_not_amk_.with_self_assertRaises_Ke._.t_self_data_": {"doc_hash": "cb6e61dfbd73314b75ef3aad892a97758e6fd51a72e7e18dd3b24b7d82c12165"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_with_allow_missing_keys.py_TestWithAllowMissingKeysMode.test_compose_TestWithAllowMissingKeysMode.test_compose.with_self_assertRaises_K._.t_self_data_": {"doc_hash": "01139a1baf93b9ea91196f793432cf9df0a3895521ce480d951db9f541ca9797"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_with_allow_missing_keys.py_TestWithAllowMissingKeysMode.test_array_transform_": {"doc_hash": "7290de0092561db42c9578e32a78cd87bc620f1f640e6cd22bad98931d06f730"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_write_metrics_reports.py_csv_": {"doc_hash": "d668ee744e868bcf27acce4b88c5271fd0e5224c5efb2eb1d0683c07cbd290ba"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zipdataset.py_unittest_TEST_CASE_4._": {"doc_hash": "08b6e70cbf62bb27769403b53079c3b32f704ed3b25cbe53a8bd905e37e8f67f"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zipdataset.py_TestZipDataset_": {"doc_hash": "21c11b983f9239b4112a6fa7468776fd07f28d59e8cce446a7c20732e07db217"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/testing_data/cpp_resample_answers.py_csv_": {"doc_hash": "3c125b69d19234655b9315edd696cbebf6223524ddc2959317bdd0e949193dd0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/testing_data/integration_answers.py_np_": {"doc_hash": "68599a3bf457bee442a1938052b75b4a98c4cc464c0478294d3a3d82ac1c6bbc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_datetime_skip_if_windows.return.unittest_skipIf_sys_platf": {"doc_hash": "9891be69c911e786f41b9ef39d71e2961acf0179cd5e34e040a63f1347277e57"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_SkipIfBeforePyTorchVersion_SkipIfBeforePyTorchVersion.__call__.return.unittest_skipIf_": {"doc_hash": "e7841bf863d727961afb1e29cb61d78d26cd798526f3c154eca28c41b1c903b4"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_SkipIfAtLeastPyTorchVersion_SkipIfAtLeastPyTorchVersion.__call__.return.unittest_skipIf_": {"doc_hash": "b10cc495d83fc67b28b76bf33bcf1cf706f9ba3be8387f2ad9b8608d496ba7da"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_make_rand_affine_DistTestCase.__setstate__.self___dict___update_data": {"doc_hash": "251ec322426829eced5e563bc25d77784ac733eea5a0a1637883f1369f2900e3"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_DistCall_DistCall.__init__.self.verbose.verbose": {"doc_hash": "2c17f3f6161e67d2f71e6cb9033d346590707c6376f708b1689f83e0020f3808"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_DistCall.run_process_DistCall.run_process.try_.finally_.try_.except_RuntimeError_as_e_.warnings_warn_f_While_clo": {"doc_hash": "711645cf666f04c59419c035979296ac678aa57febb9f1b9f66ab005ddbd2082"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_DistCall.__call___DistCall.__call__.return._wrapper": {"doc_hash": "05f2c58619ec4104a6d5d5484eaba39ea274307a72c8bfa5911e1cb7f68ab1dc"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_TimedCall_TimedCall.run_process.try_.except_Exception_as_e_.results_put_e_": {"doc_hash": "8f845775de00b72f7e813fa9ccea2609bdf8aec0f0ac31b81f52ef793e270f09"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_TimedCall.__call___TimedCall.__call__.return._wrapper": {"doc_hash": "10741bf3038517aff6c510770cf3b9a1fbbf0659a1b40fe02f85e7ff8561e5fb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py__original_funcs__call_original_func.return.f_args_kwargs_": {"doc_hash": "fdda1408485cddcc3d504624d59a34ad4c609f7ee66c73fb2e2bd64de1a93cc0"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_NumpyImageTestCase3D_TorchImageTestCase3D.setUp.self.segn.torch_tensor_self_segn_": {"doc_hash": "814f4806b84977f0b7f7a9cd7d5d24037c3e19b6ea492726c7387cc80e7107f7"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_test_script_save_test_script_save.for_i_r1_r2_in_enumer.if_None_not_in_r1_r2_.np_testing_assert_allclos": {"doc_hash": "bd6ab193339b9a2b69c97fd063ac20c4f3e7ddc82236b9bd73967cc55b773412"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py__Version_0_19_get_root.return.root": {"doc_hash": "82fa0c85cbc7c0384e0e0ba382598cf66a92347c0a0cc89580af4a77d00fee25"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_pep440_render_pep440.return.rendered": {"doc_hash": "01c58b16a9d2f48e661b642e5765298512fe65731c12944e3d8aa7eec9436b43"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_pep440_pre_render_pep440_pre.return.rendered": {"doc_hash": "068e049023aaab824be0d73855e398d30258feb5e3bb632365748c5e1a4e90eb"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_cmdclass.cmds_version_cmd_ver_get_cmdclass.if_setuptools_in_sys_mo.else_.from_distutils_command_bu": {"doc_hash": "c8e402f8dc78bf342b152a6b691e971e8aa2717b30c5601f2bd7bd281d42ec3a"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_cmdclass.cmd_build_ext_get_cmdclass.cmd_build_ext.run.write_to_version_file_tar": {"doc_hash": "f905a223aa951770fd030bd6e05830c702d1e3e81832360427986c65dca9723d"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_cmdclass.cmds_build_ext_cmd_b_get_cmdclass.if_sdist_in_cmds_.else_.from_distutils_command_sd": {"doc_hash": "6f1cc06e745b451d11be008b4389e0bfb92b446b3e6c8217c2908620314cfa15"}}, "docstore/data": 
{"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/docs/source/conf.py__Configuration_file_for__exclude_patterns._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/docs/source/conf.py__Configuration_file_for__exclude_patterns._", "embedding": null, "metadata": {"file_path": "docs/source/conf.py", "file_name": "conf.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1, "end_line": 50, "span_ids": ["docstring"], "tokens": 331}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# Configuration file for the Sphinx documentation builder.\n#\n# This file only contains a selection of the most common options. For a full\n# list see the documentation:\n# https://www.sphinx-doc.org/en/master/usage/configuration.html\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\nimport subprocess\n\nsys.path.insert(0, os.path.abspath(\"..\"))\nsys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"..\")))\nprint(sys.path)\n\nimport monai # noqa: E402\n\n# -- Project information -----------------------------------------------------\nproject = \"MONAI\"\ncopyright = \"2020 - 2021 MONAI Consortium\"\nauthor = \"MONAI Contributors\"\n\n# The full version, including alpha/beta/rc tags\nshort_version = monai.__version__.split(\"+\")[0]\nrelease = short_version\nversion = short_version\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = [\n \"transforms\",\n \"networks\",\n \"metrics\",\n \"engines\",\n \"data\",\n \"apps\",\n \"config\",\n \"handlers\",\n \"losses\",\n \"visualize\",\n \"utils\",\n \"inferers\",\n \"optimizers\",\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/docs/source/conf.py_generate_apidocs_generate_apidocs.subprocess_check_call_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/docs/source/conf.py_generate_apidocs_generate_apidocs.subprocess_check_call_", "embedding": null, "metadata": {"file_path": "docs/source/conf.py", "file_name": "conf.py", "file_type": "text/x-python", "category": "implementation", "start_line": 52, "end_line": 67, "span_ids": ["generate_apidocs"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "def generate_apidocs(*args):\n \"\"\"Generate API docs automatically by trawling the available modules\"\"\"\n module_path = os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"..\", \"monai\"))\n output_path = os.path.abspath(os.path.join(os.path.dirname(__file__), \"apidocs\"))\n apidoc_command_path = \"sphinx-apidoc\"\n if hasattr(sys, \"real_prefix\"): # called from a virtualenv\n apidoc_command_path = os.path.join(sys.prefix, \"bin\", \"sphinx-apidoc\")\n apidoc_command_path = os.path.abspath(apidoc_command_path)\n print(f\"output_path {output_path}\")\n print(f\"module_path {module_path}\")\n subprocess.check_call(\n [apidoc_command_path, \"-e\"]\n + [\"-o\", output_path]\n + [module_path]\n + [os.path.join(module_path, p) for p in exclude_patterns]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/docs/source/conf.py_setup_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/docs/source/conf.py_setup_", "embedding": null, "metadata": {"file_path": "docs/source/conf.py", "file_name": "conf.py", "file_type": "text/x-python", "category": "implementation", "start_line": 70, "end_line": 134, "span_ids": ["impl:18", "setup"], "tokens": 530}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def setup(app):\n # Hook to allow for automatic generation of API docs\n # before doc deployment begins.\n app.connect(\"builder-inited\", generate_apidocs)\n\n\n# -- General configuration ---------------------------------------------------\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nsource_suffix = {\".rst\": \"restructuredtext\", \".txt\": \"restructuredtext\", \".md\": \"markdown\"}\n\nextensions = [\n \"recommonmark\",\n \"sphinx.ext.intersphinx\",\n \"sphinx.ext.mathjax\",\n \"sphinx.ext.napoleon\",\n \"sphinx.ext.autodoc\",\n \"sphinx.ext.viewcode\",\n \"sphinx.ext.autosectionlabel\",\n \"sphinx_autodoc_typehints\",\n]\n\nautoclass_content = \"both\"\nadd_module_names = True\nautosectionlabel_prefix_document = True\nnapoleon_use_param = True\nset_type_checking_flag = True\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\n# -- Options for HTML output -------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. 
See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = \"sphinx_rtd_theme\"\n# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\nhtml_theme_options = {\n \"collapse_navigation\": True,\n \"display_version\": True,\n \"sticky_navigation\": True, # Set to False to disable the sticky nav while scrolling.\n \"logo_only\": True, # if we have a html_logo below, this shows /only/ the logo with no title text\n \"style_nav_header_background\": \"#FBFBFB\",\n}\nhtml_context = {\n \"display_github\": True,\n \"github_user\": \"Project-MONAI\",\n \"github_repo\": \"MONAI\",\n \"github_version\": \"master\",\n \"conf_py_path\": \"/docs/\",\n}\nhtml_scaled_image_link = False\nhtml_show_sourcelink = True\nhtml_favicon = \"../images/favicon.ico\"\nhtml_logo = \"../images/MONAI-logo-color.png\"\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"../_static\"]\nhtml_css_files = [\"custom.css\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/__init__.py_os_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/__init__.py_os_", "embedding": null, "metadata": {"file_path": "monai/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 63, "span_ids": ["docstring"], "tokens": 352}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport sys\n\nfrom ._version import get_versions\n\nPY_REQUIRED_MAJOR = 3\nPY_REQUIRED_MINOR = 6\n\nversion_dict = get_versions()\n__version__ = version_dict.get(\"version\", \"0+unknown\")\n__revision_id__ = version_dict.get(\"full-revisionid\")\ndel get_versions, version_dict\n\n__copyright__ = \"(c) 2020 - 2021 MONAI Consortium\"\n\n__basedir__ = os.path.dirname(__file__)\n\nif not (sys.version_info.major == PY_REQUIRED_MAJOR and sys.version_info.minor >= PY_REQUIRED_MINOR):\n raise RuntimeError(\n \"MONAI requires Python {}.{} or higher. 
But the current Python is: {}\".format(\n PY_REQUIRED_MAJOR, PY_REQUIRED_MINOR, sys.version\n ),\n )\n\nfrom .utils.module import load_submodules # noqa: E402\n\n# handlers_* have some external decorators the users may not have installed\n# *.so files and folder \"_C\" may not exist when the cpp extensions are not compiled\nexcludes = \"(^(monai.handlers))|((\\\\.so)$)|(^(monai._C))\"\n\n# load directory modules only, skip loading individual files\nload_submodules(sys.modules[__name__], False, exclude_pattern=excludes)\n\n# load all modules, this will trigger all export decorations\nload_submodules(sys.modules[__name__], True, exclude_pattern=excludes)\n\n__all__ = [\n \"apps\",\n \"config\",\n \"data\",\n \"engines\",\n \"handlers\",\n \"inferers\",\n \"losses\",\n \"metrics\",\n \"networks\",\n \"optimizers\",\n \"transforms\",\n \"utils\",\n \"visualize\",\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py__This_file_helps_to_comp_sys": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py__This_file_helps_to_comp_sys", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 2, "end_line": 17, "span_ids": ["docstring"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "# This file helps to compute a version number in source trees obtained from\n# git-archive tarball (such as those provided by githubs download-from-tag\n# feature). Distribution tarballs (built by setup.py sdist) and build\n# directories (produced by setup.py build) will contain a much shorter file\n# that just contains the computed version number.\n\n# This file is released into the public domain. 
Generated by\n# versioneer-0.19 (https://github.com/python-versioneer/python-versioneer)\n\n\"\"\"Git implementation of _version.py.\"\"\"\n\nimport errno\nimport os\nimport re\nimport subprocess\nimport sys", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_get_keywords_get_keywords.return.keywords": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_get_keywords_get_keywords.return.keywords", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 19, "end_line": 29, "span_ids": ["get_keywords"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_keywords():\n \"\"\"Get the keywords needed to look up the version information.\"\"\"\n # these strings will be replaced by git during git-archive.\n # setup.py/versioneer.py will grep for the variable names, so they must\n # each be defined on a line of their own. _version.py will just call\n # get_keywords().\n git_refnames = \"$Format:%d$\"\n git_full = \"$Format:%H$\"\n git_date = \"$Format:%ci$\"\n keywords = {\"refnames\": git_refnames, \"full\": git_full, \"date\": git_date}\n return keywords", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_VersioneerConfig_register_vcs_handler.return.decorate": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_VersioneerConfig_register_vcs_handler.return.decorate", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 33, "end_line": 67, "span_ids": ["VersioneerConfig", "impl", "NotThisMethod", "register_vcs_handler", "get_config"], "tokens": 234}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class VersioneerConfig:\n \"\"\"Container for Versioneer configuration parameters.\"\"\"\n\n\ndef get_config():\n \"\"\"Create, populate and return the VersioneerConfig() object.\"\"\"\n # these strings are filled in when 'setup.py versioneer' creates\n # _version.py\n cfg = VersioneerConfig()\n cfg.VCS = \"git\"\n cfg.style = \"pep440\"\n cfg.tag_prefix = \"\"\n cfg.parentdir_prefix = \"\"\n cfg.versionfile_source = \"monai/_version.py\"\n cfg.verbose = False\n return cfg\n\n\nclass NotThisMethod(Exception):\n \"\"\"Exception raised 
if a method is not valid for the current scenario.\"\"\"\n\n\nLONG_VERSION_PY = {}\nHANDLERS = {}\n\n\ndef register_vcs_handler(vcs, method): # decorator\n \"\"\"Create decorator to mark a method as the handler of a VCS.\"\"\"\n def decorate(f):\n \"\"\"Store f in HANDLERS[vcs][method].\"\"\"\n if vcs not in HANDLERS:\n HANDLERS[vcs] = {}\n HANDLERS[vcs][method] = f\n return f\n return decorate", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_run_command_run_command.return.stdout_p_returncode": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_run_command_run_command.return.stdout_p_returncode", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 70, "end_line": 102, "span_ids": ["run_command"], "tokens": 274}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,\n env=None):\n \"\"\"Call the given command(s).\"\"\"\n assert isinstance(commands, list)\n p = None\n for c in commands:\n try:\n dispcmd = str([c] + args)\n # remember shell=False, so use git.cmd on windows, not just git\n p = subprocess.Popen([c] + args, cwd=cwd, env=env,\n stdout=subprocess.PIPE,\n stderr=(subprocess.PIPE if hide_stderr\n else None))\n break\n except EnvironmentError:\n e = sys.exc_info()[1]\n if e.errno == errno.ENOENT:\n continue\n if verbose:\n print(\"unable to run %s\" % dispcmd)\n print(e)\n return None, None\n else:\n if verbose:\n print(\"unable to find command, tried %s\" % (commands,))\n return None, None\n stdout = p.communicate()[0].strip().decode()\n if p.returncode != 0:\n if verbose:\n print(\"unable to run %s (error)\" % dispcmd)\n print(\"stdout was %s\" % stdout)\n return None, p.returncode\n return stdout, p.returncode", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_versions_from_parentdir_versions_from_parentdir.raise_NotThisMethod_root": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_versions_from_parentdir_versions_from_parentdir.raise_NotThisMethod_root", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 106, "end_line": 128, "span_ids": ["versions_from_parentdir"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def versions_from_parentdir(parentdir_prefix, root, verbose):\n \"\"\"Try to determine the version from the parent directory name.\n\n Source tarballs conventionally unpack into a directory that includes both\n the project name and a version string. We will also support searching up\n two directory levels for an appropriately named parent directory\n \"\"\"\n rootdirs = []\n\n for i in range(3):\n dirname = os.path.basename(root)\n if dirname.startswith(parentdir_prefix):\n return {\"version\": dirname[len(parentdir_prefix):],\n \"full-revisionid\": None,\n \"dirty\": False, \"error\": None, \"date\": None}\n else:\n rootdirs.append(root)\n root = os.path.dirname(root) # up a level\n\n if verbose:\n print(\"Tried directories %s but none started with prefix %s\" %\n (str(rootdirs), parentdir_prefix))\n raise NotThisMethod(\"rootdir doesn't start with parentdir_prefix\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_git_get_keywords_git_get_keywords.return.keywords": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_git_get_keywords_git_get_keywords.return.keywords", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 131, "end_line": 157, "span_ids": ["git_get_keywords"], "tokens": 255}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@register_vcs_handler(\"git\", \"get_keywords\")\ndef git_get_keywords(versionfile_abs):\n \"\"\"Extract version information from the given file.\"\"\"\n # the code embedded in _version.py can just fetch the value of these\n # keywords. When used from setup.py, we don't want to import _version.py,\n # so we do it with a regexp instead. 
This function is not used from\n # _version.py.\n keywords = {}\n try:\n f = open(versionfile_abs, \"r\")\n for line in f.readlines():\n if line.strip().startswith(\"git_refnames =\"):\n mo = re.search(r'=\\s*\"(.*)\"', line)\n if mo:\n keywords[\"refnames\"] = mo.group(1)\n if line.strip().startswith(\"git_full =\"):\n mo = re.search(r'=\\s*\"(.*)\"', line)\n if mo:\n keywords[\"full\"] = mo.group(1)\n if line.strip().startswith(\"git_date =\"):\n mo = re.search(r'=\\s*\"(.*)\"', line)\n if mo:\n keywords[\"date\"] = mo.group(1)\n f.close()\n except EnvironmentError:\n pass\n return keywords", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_git_versions_from_keywords_git_versions_from_keywords.return._version_0_unknown_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_git_versions_from_keywords_git_versions_from_keywords.return._version_0_unknown_", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 159, "end_line": 215, "span_ids": ["git_versions_from_keywords"], "tokens": 745}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@register_vcs_handler(\"git\", \"keywords\")\ndef git_versions_from_keywords(keywords, tag_prefix, verbose):\n \"\"\"Get version information from git keywords.\"\"\"\n if not keywords:\n raise NotThisMethod(\"no keywords at all, weird\")\n date = keywords.get(\"date\")\n if date is not None:\n # Use only the last line. Previous lines may contain GPG signature\n # information.\n date = date.splitlines()[-1]\n\n # git-2.2.0 added \"%cI\", which expands to an ISO-8601 -compliant\n # datestamp. However we prefer \"%ci\" (which expands to an \"ISO-8601\n # -like\" string, which we must then edit to make compliant), because\n # it's been around since git-1.5.3, and it's too difficult to\n # discover which version we're using, or to work around using an\n # older one.\n date = date.strip().replace(\" \", \"T\", 1).replace(\" \", \"\", 1)\n refnames = keywords[\"refnames\"].strip()\n if refnames.startswith(\"$Format\"):\n if verbose:\n print(\"keywords are unexpanded, not using\")\n raise NotThisMethod(\"unexpanded keywords, not a git-archive tarball\")\n refs = set([r.strip() for r in refnames.strip(\"()\").split(\",\")])\n # starting in git-1.8.3, tags are listed as \"tag: foo-1.0\" instead of\n # just \"foo-1.0\". If we see a \"tag: \" prefix, prefer those.\n TAG = \"tag: \"\n tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])\n if not tags:\n # Either we're using git < 1.8.3, or there really are no tags. We use\n # a heuristic: assume all version tags have a digit. The old git %d\n # expansion behaves like git log --decorate=short and strips out the\n # refs/heads/ and refs/tags/ prefixes that would let us distinguish\n # between branches and tags. 
By ignoring refnames without digits, we\n # filter out many common branch names like \"release\" and\n # \"stabilization\", as well as \"HEAD\" and \"master\".\n tags = set([r for r in refs if re.search(r'\\d', r)])\n if verbose:\n print(\"discarding '%s', no digits\" % \",\".join(refs - tags))\n if verbose:\n print(\"likely tags: %s\" % \",\".join(sorted(tags)))\n for ref in sorted(tags):\n # sorting will prefer e.g. \"2.0\" over \"2.0rc1\"\n if ref.startswith(tag_prefix):\n r = ref[len(tag_prefix):]\n if verbose:\n print(\"picking %s\" % r)\n return {\"version\": r,\n \"full-revisionid\": keywords[\"full\"].strip(),\n \"dirty\": False, \"error\": None,\n \"date\": date}\n # no suitable tags, so version is \"0+unknown\", but full hex is still there\n if verbose:\n print(\"no suitable tags, using unknown + full revision id\")\n return {\"version\": \"0+unknown\",\n \"full-revisionid\": keywords[\"full\"].strip(),\n \"dirty\": False, \"error\": \"no suitable tags\", \"date\": None}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_git_pieces_from_vcs_git_pieces_from_vcs.return.pieces": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_git_pieces_from_vcs_git_pieces_from_vcs.return.pieces", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 218, "end_line": 310, "span_ids": ["git_pieces_from_vcs"], "tokens": 905}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@register_vcs_handler(\"git\", \"pieces_from_vcs\")\ndef git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):\n \"\"\"Get version from 'git describe' in the root of the source tree.\n\n This only gets called if the git-archive 'subst' keywords were *not*\n expanded, and _version.py hasn't already been rewritten with a short\n version string, meaning we're inside a checked out source tree.\n \"\"\"\n GITS = [\"git\"]\n if sys.platform == \"win32\":\n GITS = [\"git.cmd\", \"git.exe\"]\n\n out, rc = run_command(GITS, [\"rev-parse\", \"--git-dir\"], cwd=root,\n hide_stderr=True)\n if rc != 0:\n if verbose:\n print(\"Directory %s not under git control\" % root)\n raise NotThisMethod(\"'git rev-parse --git-dir' returned error\")\n\n # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]\n # if there isn't one, this yields HEX[-dirty] (no NUM)\n describe_out, rc = run_command(GITS, [\"describe\", \"--tags\", \"--dirty\",\n \"--always\", \"--long\",\n \"--match\", \"%s*\" % tag_prefix],\n cwd=root)\n # --long was added in git-1.5.5\n if describe_out is None:\n raise NotThisMethod(\"'git describe' failed\")\n describe_out = describe_out.strip()\n full_out, rc = run_command(GITS, [\"rev-parse\", \"HEAD\"], cwd=root)\n if full_out is None:\n raise NotThisMethod(\"'git rev-parse' failed\")\n full_out = full_out.strip()\n\n pieces = {}\n pieces[\"long\"] = full_out\n 
pieces[\"short\"] = full_out[:7] # maybe improved later\n pieces[\"error\"] = None\n\n # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]\n # TAG might have hyphens.\n git_describe = describe_out\n\n # look for -dirty suffix\n dirty = git_describe.endswith(\"-dirty\")\n pieces[\"dirty\"] = dirty\n if dirty:\n git_describe = git_describe[:git_describe.rindex(\"-dirty\")]\n\n # now we have TAG-NUM-gHEX or HEX\n\n if \"-\" in git_describe:\n # TAG-NUM-gHEX\n mo = re.search(r'^(.+)-(\\d+)-g([0-9a-f]+)$', git_describe)\n if not mo:\n # unparseable. Maybe git-describe is misbehaving?\n pieces[\"error\"] = (\"unable to parse git-describe output: '%s'\"\n % describe_out)\n return pieces\n\n # tag\n full_tag = mo.group(1)\n if not full_tag.startswith(tag_prefix):\n if verbose:\n fmt = \"tag '%s' doesn't start with prefix '%s'\"\n print(fmt % (full_tag, tag_prefix))\n pieces[\"error\"] = (\"tag '%s' doesn't start with prefix '%s'\"\n % (full_tag, tag_prefix))\n return pieces\n pieces[\"closest-tag\"] = full_tag[len(tag_prefix):]\n\n # distance: number of commits since tag\n pieces[\"distance\"] = int(mo.group(2))\n\n # commit: short hex revision ID\n pieces[\"short\"] = mo.group(3)\n\n else:\n # HEX: no tags\n pieces[\"closest-tag\"] = None\n count_out, rc = run_command(GITS, [\"rev-list\", \"HEAD\", \"--count\"],\n cwd=root)\n pieces[\"distance\"] = int(count_out) # total number of commits\n\n # commit date: see ISO-8601 comment in git_versions_from_keywords()\n date = run_command(GITS, [\"show\", \"-s\", \"--format=%ci\", \"HEAD\"],\n cwd=root)[0].strip()\n # Use only the last line. Previous lines may contain GPG signature\n # information.\n date = date.splitlines()[-1]\n pieces[\"date\"] = date.strip().replace(\" \", \"T\", 1).replace(\" \", \"\", 1)\n\n return pieces", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_plus_or_dot_render_pep440.return.rendered": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_plus_or_dot_render_pep440.return.rendered", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 307, "end_line": 336, "span_ids": ["plus_or_dot", "render_pep440"], "tokens": 258}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def plus_or_dot(pieces):\n \"\"\"Return a + if we don't already have one, else return a .\"\"\"\n if \"+\" in pieces.get(\"closest-tag\", \"\"):\n return \".\"\n return \"+\"\n\n\ndef render_pep440(pieces):\n \"\"\"Build up version string, with post-release \"local version identifier\".\n\n Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you\n get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty\n\n Exceptions:\n 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty]\n \"\"\"\n if pieces[\"closest-tag\"]:\n rendered = pieces[\"closest-tag\"]\n if pieces[\"distance\"] or pieces[\"dirty\"]:\n rendered += plus_or_dot(pieces)\n rendered += \"%d.g%s\" % (pieces[\"distance\"], pieces[\"short\"])\n if pieces[\"dirty\"]:\n rendered += \".dirty\"\n else:\n # exception #1\n rendered = \"0+untagged.%d.g%s\" % (pieces[\"distance\"],\n pieces[\"short\"])\n if pieces[\"dirty\"]:\n rendered += \".dirty\"\n return rendered", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_pep440_old_render_pep440_old.return.rendered": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_pep440_old_render_pep440_old.return.rendered", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 382, "end_line": 401, "span_ids": ["render_pep440_old"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def render_pep440_old(pieces):\n \"\"\"TAG[.postDISTANCE[.dev0]] .\n\n The \".dev0\" means dirty.\n\n Exceptions:\n 1: no tags. 0.postDISTANCE[.dev0]\n \"\"\"\n if pieces[\"closest-tag\"]:\n rendered = pieces[\"closest-tag\"]\n if pieces[\"distance\"] or pieces[\"dirty\"]:\n rendered += \".post%d\" % pieces[\"distance\"]\n if pieces[\"dirty\"]:\n rendered += \".dev0\"\n else:\n # exception #1\n rendered = \"0.post%d\" % pieces[\"distance\"]\n if pieces[\"dirty\"]:\n rendered += \".dev0\"\n return rendered", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_git_describe_render_git_describe.return.rendered": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_git_describe_render_git_describe.return.rendered", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 404, "end_line": 421, "span_ids": ["render_git_describe"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def render_git_describe(pieces):\n \"\"\"TAG[-DISTANCE-gHEX][-dirty].\n\n Like 'git describe --tags --dirty --always'.\n\n Exceptions:\n 1: no tags. 
HEX[-dirty] (note: no 'g' prefix)\n \"\"\"\n if pieces[\"closest-tag\"]:\n rendered = pieces[\"closest-tag\"]\n if pieces[\"distance\"]:\n rendered += \"-%d-g%s\" % (pieces[\"distance\"], pieces[\"short\"])\n else:\n # exception #1\n rendered = pieces[\"short\"]\n if pieces[\"dirty\"]:\n rendered += \"-dirty\"\n return rendered", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_git_describe_long_render_git_describe_long.return.rendered": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_git_describe_long_render_git_describe_long.return.rendered", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 424, "end_line": 441, "span_ids": ["render_git_describe_long"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def render_git_describe_long(pieces):\n \"\"\"TAG-DISTANCE-gHEX[-dirty].\n\n Like 'git describe --tags --dirty --always -long'.\n The distance/hash is unconditional.\n\n Exceptions:\n 1: no tags. HEX[-dirty] (note: no 'g' prefix)\n \"\"\"\n if pieces[\"closest-tag\"]:\n rendered = pieces[\"closest-tag\"]\n rendered += \"-%d-g%s\" % (pieces[\"distance\"], pieces[\"short\"])\n else:\n # exception #1\n rendered = pieces[\"short\"]\n if pieces[\"dirty\"]:\n rendered += \"-dirty\"\n return rendered", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_render.return._version_rendered_fu": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_render.return._version_rendered_fu", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 444, "end_line": 473, "span_ids": ["render"], "tokens": 264}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def render(pieces, style):\n \"\"\"Render the given version pieces into the requested style.\"\"\"\n if pieces[\"error\"]:\n return {\"version\": \"unknown\",\n \"full-revisionid\": pieces.get(\"long\"),\n \"dirty\": None,\n \"error\": pieces[\"error\"],\n \"date\": None}\n\n if not style or style == \"default\":\n style = \"pep440\" # the default\n\n if style == \"pep440\":\n rendered = render_pep440(pieces)\n elif style == \"pep440-pre\":\n rendered = 
render_pep440_pre(pieces)\n elif style == \"pep440-post\":\n rendered = render_pep440_post(pieces)\n elif style == \"pep440-old\":\n rendered = render_pep440_old(pieces)\n elif style == \"git-describe\":\n rendered = render_git_describe(pieces)\n elif style == \"git-describe-long\":\n rendered = render_git_describe_long(pieces)\n else:\n raise ValueError(\"unknown style '%s'\" % style)\n\n return {\"version\": rendered, \"full-revisionid\": pieces[\"long\"],\n \"dirty\": pieces[\"dirty\"], \"error\": None,\n \"date\": pieces.get(\"date\")}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_get_versions_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_get_versions_", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 482, "end_line": 526, "span_ids": ["get_versions"], "tokens": 365}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_versions():\n \"\"\"Get version information or return default if unable to do so.\"\"\"\n # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have\n # __file__, we can work backwards from there to the root. Some\n # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which\n # case we can only use expanded keywords.\n\n cfg = get_config()\n verbose = cfg.verbose\n\n try:\n return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,\n verbose)\n except NotThisMethod:\n pass\n\n try:\n root = os.path.realpath(__file__)\n # versionfile_source is the relative path from the top of the source\n # tree (where the .git directory might live) to this file. 
Invert\n # this to find the root from __file__.\n for i in cfg.versionfile_source.split('/'):\n root = os.path.dirname(root)\n except NameError:\n return {\"version\": \"0+unknown\", \"full-revisionid\": None,\n \"dirty\": None,\n \"error\": \"unable to find root of source tree\",\n \"date\": None}\n\n try:\n pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)\n return render(pieces, cfg.style)\n except NotThisMethod:\n pass\n\n try:\n if cfg.parentdir_prefix:\n return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)\n except NotThisMethod:\n pass\n\n return {\"version\": \"0+unknown\", \"full-revisionid\": None,\n \"dirty\": None,\n \"error\": \"unable to compute version\", \"date\": None}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_DecathlonDataset_DecathlonDataset._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_DecathlonDataset_DecathlonDataset._", "embedding": null, "metadata": {"file_path": "monai/apps/datasets.py", "file_name": "datasets.py", "file_type": "text/x-python", "category": "implementation", "start_line": 158, "end_line": 211, "span_ids": ["DecathlonDataset"], "tokens": 723}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DecathlonDataset(Randomizable, CacheDataset):\n \"\"\"\n The Dataset to automatically download the data of Medical Segmentation Decathlon challenge\n (http://medicaldecathlon.com/) and generate items for training, validation or test.\n It will also load these properties from the JSON config file of dataset. user can call `get_properties()`\n to get specified properties or all the properties loaded.\n It's based on :py:class:`monai.data.CacheDataset` to accelerate the training process.\n\n Args:\n root_dir: user's local directory for caching and loading the MSD datasets.\n task: which task to download and execute: one of list (\"Task01_BrainTumour\", \"Task02_Heart\",\n \"Task03_Liver\", \"Task04_Hippocampus\", \"Task05_Prostate\", \"Task06_Lung\", \"Task07_Pancreas\",\n \"Task08_HepaticVessel\", \"Task09_Spleen\", \"Task10_Colon\").\n section: expected data section, can be: `training`, `validation` or `test`.\n transform: transforms to execute operations on input data.\n for further usage, use `AddChanneld` or `AsChannelFirstd` to convert the shape to [C, H, W, D].\n download: whether to download and extract the Decathlon from resource link, default is False.\n if expected file already exists, skip downloading even set it to True.\n val_frac: percentage of of validation fraction in the whole dataset, default is 0.2.\n user can manually copy tar file or dataset folder to the root directory.\n seed: random seed to randomly shuffle the datalist before splitting into training and validation, default is 0.\n note to set same seed for `training` and `validation` sections.\n cache_num: number of items to be cached. 
Default is `sys.maxsize`.\n will take the minimum of (cache_num, data_length x cache_rate, data_length).\n cache_rate: percentage of cached data in total, default is 1.0 (cache all).\n will take the minimum of (cache_num, data_length x cache_rate, data_length).\n num_workers: the number of worker threads to use.\n if 0 a single thread will be used. Default is 0.\n\n Raises:\n ValueError: When ``root_dir`` is not a directory.\n ValueError: When ``task`` is not one of [\"Task01_BrainTumour\", \"Task02_Heart\",\n \"Task03_Liver\", \"Task04_Hippocampus\", \"Task05_Prostate\", \"Task06_Lung\", \"Task07_Pancreas\",\n \"Task08_HepaticVessel\", \"Task09_Spleen\", \"Task10_Colon\"].\n RuntimeError: When ``dataset_dir`` doesn't exist and downloading is not selected (``download=False``).\n\n Example::\n\n transform = Compose(\n [\n LoadImaged(keys=[\"image\", \"label\"]),\n AddChanneld(keys=[\"image\", \"label\"]),\n ScaleIntensityd(keys=\"image\"),\n ToTensord(keys=[\"image\", \"label\"]),\n ]\n )\n\n val_data = DecathlonDataset(\n root_dir=\"./\", task=\"Task09_Spleen\", transform=transform, section=\"validation\", seed=12345, download=True\n )\n\n print(val_data[0][\"image\"], val_data[0][\"label\"])\n\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_DecathlonDataset.resource_DecathlonDataset.md5._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_DecathlonDataset.resource_DecathlonDataset.md5._", "embedding": null, "metadata": {"file_path": "monai/apps/datasets.py", "file_name": "datasets.py", "file_type": "text/x-python", "category": "implementation", "start_line": 193, "end_line": 216, "span_ids": ["DecathlonDataset"], "tokens": 628}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DecathlonDataset(Randomizable, CacheDataset):\n\n resource = {\n \"Task01_BrainTumour\": \"https://msd-for-monai.s3-us-west-2.amazonaws.com/Task01_BrainTumour.tar\",\n \"Task02_Heart\": \"https://msd-for-monai.s3-us-west-2.amazonaws.com/Task02_Heart.tar\",\n \"Task03_Liver\": \"https://msd-for-monai.s3-us-west-2.amazonaws.com/Task03_Liver.tar\",\n \"Task04_Hippocampus\": \"https://msd-for-monai.s3-us-west-2.amazonaws.com/Task04_Hippocampus.tar\",\n \"Task05_Prostate\": \"https://msd-for-monai.s3-us-west-2.amazonaws.com/Task05_Prostate.tar\",\n \"Task06_Lung\": \"https://msd-for-monai.s3-us-west-2.amazonaws.com/Task06_Lung.tar\",\n \"Task07_Pancreas\": \"https://msd-for-monai.s3-us-west-2.amazonaws.com/Task07_Pancreas.tar\",\n \"Task08_HepaticVessel\": \"https://msd-for-monai.s3-us-west-2.amazonaws.com/Task08_HepaticVessel.tar\",\n \"Task09_Spleen\": \"https://msd-for-monai.s3-us-west-2.amazonaws.com/Task09_Spleen.tar\",\n \"Task10_Colon\": \"https://msd-for-monai.s3-us-west-2.amazonaws.com/Task10_Colon.tar\",\n }\n md5 = {\n \"Task01_BrainTumour\": \"240a19d752f0d9e9101544901065d872\",\n \"Task02_Heart\": \"06ee59366e1e5124267b774dbd654057\",\n \"Task03_Liver\": 
\"a90ec6c4aa7f6a3d087205e23d4e6397\",\n \"Task04_Hippocampus\": \"9d24dba78a72977dbd1d2e110310f31b\",\n \"Task05_Prostate\": \"35138f08b1efaef89d7424d2bcc928db\",\n \"Task06_Lung\": \"8afd997733c7fc0432f71255ba4e52dc\",\n \"Task07_Pancreas\": \"4f7080cfca169fa8066d17ce6eb061e4\",\n \"Task08_HepaticVessel\": \"641d79e80ec66453921d997fbf12a29c\",\n \"Task09_Spleen\": \"410d4a301da4e5b2f6f86ec3ddba524e\",\n \"Task10_Colon\": \"bad7a188931dc2f6acf72b08eb6202d0\",\n }", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/__init__.py_CSVSaver_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/__init__.py_CSVSaver_", "embedding": null, "metadata": {"file_path": "monai/data/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 65, "span_ids": ["imports:18", "docstring"], "tokens": 397}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from .csv_saver import CSVSaver\nfrom .dataloader import DataLoader\nfrom .dataset import (\n ArrayDataset,\n CacheDataset,\n CacheNTransDataset,\n Dataset,\n LMDBDataset,\n NPZDictItemDataset,\n PersistentDataset,\n SmartCacheDataset,\n ZipDataset,\n)\nfrom .decathlon_datalist import load_decathlon_datalist, load_decathlon_properties\nfrom .grid_dataset import GridPatchDataset, PatchDataset, PatchIter\nfrom .image_dataset import ImageDataset\nfrom .image_reader import ImageReader, ITKReader, NibabelReader, NumpyReader, PILReader, WSIReader\nfrom .inverse_batch_transform import BatchInverseTransform\nfrom .iterable_dataset import IterableDataset\nfrom .nifti_saver import NiftiSaver\nfrom .nifti_writer import write_nifti\nfrom .png_saver import PNGSaver\nfrom .png_writer import write_png\nfrom .samplers import DistributedSampler, DistributedWeightedRandomSampler\nfrom .synthetic import create_test_image_2d, create_test_image_3d\nfrom .test_time_augmentation import TestTimeAugmentation\nfrom .thread_buffer import ThreadBuffer, ThreadDataLoader\nfrom .utils import (\n compute_importance_map,\n compute_shape_offset,\n correct_nifti_header_if_necessary,\n create_file_basename,\n decollate_batch,\n dense_patch_slices,\n get_random_patch,\n get_valid_patch_size,\n is_supported_format,\n iter_patch,\n iter_patch_slices,\n json_hashing,\n list_data_collate,\n pad_list_data_collate,\n partition_dataset,\n partition_dataset_classes,\n pickle_hashing,\n rectify_header_sform_qform,\n select_cross_validation_folds,\n set_rnd,\n sorted_dict,\n to_affine_nd,\n worker_init_fn,\n zoom_affine,\n)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/csv_saver.py_CSVSaver.finalize_CSVSaver.finalize.with_open_self__filepath_.for_k_v_in_self__cache_d.f_write_n_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/csv_saver.py_CSVSaver.finalize_CSVSaver.finalize.with_open_self__filepath_.for_k_v_in_self__cache_d.f_write_n_", "embedding": null, "metadata": {"file_path": "monai/data/csv_saver.py", "file_name": "csv_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 46, "end_line": 64, "span_ids": ["CSVSaver.finalize"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CSVSaver:\n\n def finalize(self) -> None:\n \"\"\"\n Writes the cached dict to a csv\n\n \"\"\"\n if not self.overwrite and os.path.exists(self._filepath):\n with open(self._filepath, \"r\") as f:\n reader = csv.reader(f)\n for row in reader:\n self._cache_dict[row[0]] = np.array(row[1:]).astype(np.float32)\n\n if not os.path.exists(self.output_dir):\n os.makedirs(self.output_dir)\n with open(self._filepath, \"w\") as f:\n for k, v in self._cache_dict.items():\n f.write(k)\n for result in v.flatten():\n f.write(\",\" + str(result))\n f.write(\"\\n\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/csv_saver.py_CSVSaver.save_CSVSaver.save.self__cache_dict_save_key": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/csv_saver.py_CSVSaver.save_CSVSaver.save.self__cache_dict_save_key", "embedding": null, "metadata": {"file_path": "monai/data/csv_saver.py", "file_name": "csv_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 68, "end_line": 84, "span_ids": ["CSVSaver.save"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CSVSaver:\n\n def save(self, data: Union[torch.Tensor, np.ndarray], meta_data: Optional[Dict] = None) -> None:\n \"\"\"Save data into the cache dictionary. 
The metadata should have the following key:\n - ``'filename_or_obj'`` -- save the data corresponding to file name or object.\n If meta_data is None, use the default index from 0 to save data instead.\n\n Args:\n data: target data content that save into cache.\n meta_data: the meta data information corresponding to the data.\n\n \"\"\"\n save_key = meta_data[Key.FILENAME_OR_OBJ] if meta_data else str(self._data_index)\n self._data_index += 1\n if isinstance(data, torch.Tensor):\n data = data.detach().cpu().numpy()\n if not isinstance(data, np.ndarray):\n raise AssertionError\n self._cache_dict[save_key] = data.astype(np.float32)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/csv_saver.py_CSVSaver.save_batch_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/csv_saver.py_CSVSaver.save_batch_", "embedding": null, "metadata": {"file_path": "monai/data/csv_saver.py", "file_name": "csv_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 83, "end_line": 93, "span_ids": ["CSVSaver.save_batch"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CSVSaver:\n\n def save_batch(self, batch_data: Union[torch.Tensor, np.ndarray], meta_data: Optional[Dict] = None) -> None:\n \"\"\"Save a batch of data into the cache dictionary.\n\n Args:\n batch_data: target batch data content that save into cache.\n meta_data: every key-value in the meta_data is corresponding to 1 batch of data.\n\n \"\"\"\n for i, data in enumerate(batch_data): # save a batch of files\n self.save(data, {k: meta_data[k][i] for k in meta_data} if meta_data else None)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_saver.py_NiftiSaver.save_NiftiSaver.save.write_nifti_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_saver.py_NiftiSaver.save_NiftiSaver.save.write_nifti_", "embedding": null, "metadata": {"file_path": "monai/data/nifti_saver.py", "file_name": "nifti_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 96, "end_line": 155, "span_ids": ["NiftiSaver.save"], "tokens": 649}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class NiftiSaver:\n\n def save(self, data: Union[torch.Tensor, np.ndarray], meta_data: Optional[Dict] = None) -> None:\n \"\"\"\n Save data into a Nifti file.\n The meta_data could optionally have 
the following keys:\n\n - ``'filename_or_obj'`` -- for output file name creation, corresponding to filename or object.\n - ``'original_affine'`` -- for data orientation handling, defaulting to an identity matrix.\n - ``'affine'`` -- for data output affine, defaulting to an identity matrix.\n - ``'spatial_shape'`` -- for data output shape.\n - ``'patch_index'`` -- if the data is a patch of big image, append the patch index to filename.\n\n When meta_data is specified, the saver will try to resample batch data from the space\n defined by \"affine\" to the space defined by \"original_affine\".\n\n If meta_data is None, use the default index (starting from 0) as the filename.\n\n Args:\n data: target data content that to be saved as a NIfTI format file.\n Assuming the data shape starts with a channel dimension and followed by spatial dimensions.\n meta_data: the meta data information corresponding to the data.\n\n See Also\n :py:meth:`monai.data.nifti_writer.write_nifti`\n \"\"\"\n filename = meta_data[Key.FILENAME_OR_OBJ] if meta_data else str(self._data_index)\n self._data_index += 1\n original_affine = meta_data.get(\"original_affine\", None) if meta_data else None\n affine = meta_data.get(\"affine\", None) if meta_data else None\n spatial_shape = meta_data.get(\"spatial_shape\", None) if meta_data else None\n patch_index = meta_data.get(Key.PATCH_INDEX, None) if meta_data else None\n\n if isinstance(data, torch.Tensor):\n data = data.detach().cpu().numpy()\n\n path = create_file_basename(self.output_postfix, filename, self.output_dir, self.data_root_dir, patch_index)\n path = f\"{path}{self.output_ext}\"\n # change data shape to be (channel, h, w, d)\n while len(data.shape) < 4:\n data = np.expand_dims(data, -1)\n # change data to \"channel last\" format and write to nifti format file\n data = np.moveaxis(np.asarray(data), 0, -1)\n\n # if desired, remove trailing singleton dimensions\n if self.squeeze_end_dims:\n while data.shape[-1] == 1:\n data = np.squeeze(data, -1)\n\n write_nifti(\n data,\n file_name=path,\n affine=affine,\n target_affine=original_affine,\n resample=self.resample,\n output_spatial_shape=spatial_shape,\n mode=self.mode,\n padding_mode=self.padding_mode,\n align_corners=self.align_corners,\n dtype=self.dtype,\n output_dtype=self.output_dtype,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_saver.py_NiftiSaver.save_batch_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_saver.py_NiftiSaver.save_batch_", "embedding": null, "metadata": {"file_path": "monai/data/nifti_saver.py", "file_name": "nifti_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 157, "end_line": 178, "span_ids": ["NiftiSaver.save_batch"], "tokens": 297}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class NiftiSaver:\n\n def save_batch(self, batch_data: Union[torch.Tensor, np.ndarray], meta_data: Optional[Dict] = None) -> None:\n \"\"\"\n Save 
a batch of data into Nifti format files.\n\n        Spatially it supports up to three dimensions, that is, H, HW, HWD for\n        1D, 2D, 3D respectively (with resampling supports for 2D and 3D only).\n\n        When saving multiple time steps or multiple channels `batch_data`,\n        time and/or modality axes should be appended after the batch dimensions.\n        For example, the shape of a batch of 2D eight-class\n        segmentation probabilities to be saved could be `(batch, 8, 64, 64)`;\n        in this case each item in the batch will be saved as (64, 64, 1, 8)\n        NIfTI file (the third dimension is reserved as a spatial dimension).\n\n        Args:\n            batch_data: target batch data content that save into NIfTI format.\n            meta_data: every key-value in the meta_data is corresponding to a batch of data.\n\n        \"\"\"\n        for i, data in enumerate(batch_data):  # save a batch of files\n            self.save(data=data, meta_data={k: meta_data[k][i] for k in meta_data} if meta_data is not None else None)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_writer.py_from_typing_import_Option_write_nifti._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_writer.py_from_typing_import_Option_write_nifti._", "embedding": null, "metadata": {"file_path": "monai/data/nifti_writer.py", "file_name": "nifti_writer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 91, "span_ids": ["write_nifti", "docstring"], "tokens": 1049}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Optional, Sequence, Union\n\nimport numpy as np\nimport torch\n\nfrom monai.config import DtypeLike\nfrom monai.data.utils import compute_shape_offset, to_affine_nd\nfrom monai.networks.layers import AffineTransform\nfrom monai.utils import GridSampleMode, GridSamplePadMode, optional_import\n\nnib, _ = optional_import(\"nibabel\")\n\n\ndef write_nifti(\n    data: np.ndarray,\n    file_name: str,\n    affine: Optional[np.ndarray] = None,\n    target_affine: Optional[np.ndarray] = None,\n    resample: bool = True,\n    output_spatial_shape: Union[Sequence[int], np.ndarray, None] = None,\n    mode: Union[GridSampleMode, str] = GridSampleMode.BILINEAR,\n    padding_mode: Union[GridSamplePadMode, str] = GridSamplePadMode.BORDER,\n    align_corners: bool = False,\n    dtype: DtypeLike = np.float64,\n    output_dtype: DtypeLike = np.float32,\n) -> None:\n    \"\"\"\n    Write numpy data into NIfTI files to disk. This function converts data\n    into the coordinate system defined by `target_affine` when `target_affine`\n    is specified.\n\n    If the coordinate transform between `affine` and `target_affine` could be\n    achieved by simply transposing and flipping `data`, no resampling will\n    happen. Otherwise, this function will resample `data` using the coordinate\n    transform computed from `affine` and `target_affine`. Note that the shape\n    of the resampled `data` may be subject to some rounding errors. 
For example,\n    resampling a 20x20 pixel image from pixel size (1.5, 1.5)-mm to (3.0,\n    3.0)-mm space will return a 10x10-pixel image. However, resampling a\n    20x20-pixel image from pixel size (2.0, 2.0)-mm to (3.0, 3.0)-mm space\n    will output a 14x14-pixel image, where the image shape is rounded from\n    13.333x13.333 pixels. In this case `output_spatial_shape` could be specified so\n    that this function writes image data to a designated shape.\n\n    When `affine` and `target_affine` are None, the data will be saved with an\n    identity matrix as the image affine.\n\n    This function assumes the NIfTI dimension notations.\n    Spatially it supports up to three dimensions, that is, H, HW, HWD for\n    1D, 2D, 3D respectively.\n    When saving multiple time steps or multiple channels `data`, time and/or\n    modality axes should be appended after the first three dimensions. For\n    example, shape of 2D eight-class segmentation probabilities to be saved\n    could be `(64, 64, 1, 8)`. Also, data in shape (64, 64, 8), (64, 64, 8, 1)\n    will be considered as a single-channel 3D image.\n\n    Args:\n        data: input data to write to file.\n        file_name: expected file name that saved on disk.\n        affine: the current affine of `data`. Defaults to `np.eye(4)`\n        target_affine: before saving\n            the (`data`, `affine`) as a Nifti1Image,\n            transform the data into the coordinates defined by `target_affine`.\n        resample: whether to run resampling when the target affine\n            could not be achieved by swapping/flipping data axes.\n        output_spatial_shape: spatial shape of the output image.\n            This option is used when resample = True.\n        mode: {``\"bilinear\"``, ``\"nearest\"``}\n            This option is used when ``resample = True``.\n            Interpolation mode to calculate output values. Defaults to ``\"bilinear\"``.\n            See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n        padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n            This option is used when ``resample = True``.\n            Padding mode for outside grid values. Defaults to ``\"border\"``.\n            See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n        align_corners: Geometrically, we consider the pixels of the input as squares rather than points.\n            See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n        dtype: data type for resampling computation. Defaults to ``np.float64`` for best precision.\n            If None, use the data type of input data.\n        output_dtype: data type for saving data. Defaults to ``np.float32``.\n    \"\"\"\n    # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_writer.py_write_nifti.if_data_ndim_3_mult_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/nifti_writer.py_write_nifti.if_data_ndim_3_mult_", "embedding": null, "metadata": {"file_path": "monai/data/nifti_writer.py", "file_name": "nifti_writer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 130, "end_line": 157, "span_ids": ["write_nifti"], "tokens": 526}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def write_nifti(\n data: np.ndarray,\n file_name: str,\n affine: Optional[np.ndarray] = None,\n target_affine: Optional[np.ndarray] = None,\n resample: bool = True,\n output_spatial_shape: Union[Sequence[int], np.ndarray, None] = None,\n mode: Union[GridSampleMode, str] = GridSampleMode.BILINEAR,\n padding_mode: Union[GridSamplePadMode, str] = GridSamplePadMode.BORDER,\n align_corners: bool = False,\n dtype: DtypeLike = np.float64,\n output_dtype: DtypeLike = np.float32,\n) -> None:\n # ... other code\n if data.ndim > 3: # multi channel, resampling each channel\n while len(output_spatial_shape_) < 3:\n output_spatial_shape_ = output_spatial_shape_ + [1]\n spatial_shape, channel_shape = data.shape[:3], data.shape[3:]\n data_np = data.reshape(list(spatial_shape) + [-1])\n data_np = np.moveaxis(data_np, -1, 0) # channel first for pytorch\n data_torch = affine_xform(\n torch.as_tensor(np.ascontiguousarray(data_np).astype(dtype)).unsqueeze(0),\n torch.as_tensor(np.ascontiguousarray(transform).astype(dtype)),\n spatial_size=output_spatial_shape_[:3],\n )\n data_np = data_torch.squeeze(0).detach().cpu().numpy()\n data_np = np.moveaxis(data_np, 0, -1) # channel last for nifti\n data_np = data_np.reshape(list(data_np.shape[:3]) + list(channel_shape))\n else: # single channel image, need to expand to have batch and channel\n while len(output_spatial_shape_) < len(data.shape):\n output_spatial_shape_ = output_spatial_shape_ + [1]\n data_torch = affine_xform(\n torch.as_tensor(np.ascontiguousarray(data).astype(dtype)[None, None]),\n torch.as_tensor(np.ascontiguousarray(transform).astype(dtype)),\n spatial_size=output_spatial_shape_[: len(data.shape)],\n )\n data_np = data_torch.squeeze(0).squeeze(0).detach().cpu().numpy()\n\n results_img = nib.Nifti1Image(data_np.astype(output_dtype), to_affine_nd(3, target_affine))\n nib.save(results_img, file_name)\n return", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/png_saver.py_PNGSaver.save_PNGSaver.save.write_png_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/png_saver.py_PNGSaver.save_PNGSaver.save.write_png_", 
"embedding": null, "metadata": {"file_path": "monai/data/png_saver.py", "file_name": "png_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 74, "end_line": 123, "span_ids": ["PNGSaver.save"], "tokens": 493}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PNGSaver:\n\n def save(self, data: Union[torch.Tensor, np.ndarray], meta_data: Optional[Dict] = None) -> None:\n \"\"\"\n Save data into a png file.\n The meta_data could optionally have the following keys:\n\n - ``'filename_or_obj'`` -- for output file name creation, corresponding to filename or object.\n - ``'spatial_shape'`` -- for data output shape.\n - ``'patch_index'`` -- if the data is a patch of big image, append the patch index to filename.\n\n If meta_data is None, use the default index (starting from 0) as the filename.\n\n Args:\n data: target data content that to be saved as a png format file.\n Assuming the data shape are spatial dimensions.\n Shape of the spatial dimensions (C,H,W).\n C should be 1, 3 or 4\n meta_data: the meta data information corresponding to the data.\n\n Raises:\n ValueError: When ``data`` channels is not one of [1, 3, 4].\n\n See Also\n :py:meth:`monai.data.png_writer.write_png`\n\n \"\"\"\n filename = meta_data[Key.FILENAME_OR_OBJ] if meta_data else str(self._data_index)\n self._data_index += 1\n spatial_shape = meta_data.get(\"spatial_shape\", None) if meta_data and self.resample else None\n patch_index = meta_data.get(Key.PATCH_INDEX, None) if meta_data else None\n\n if isinstance(data, torch.Tensor):\n data = data.detach().cpu().numpy()\n\n path = create_file_basename(self.output_postfix, filename, self.output_dir, self.data_root_dir, patch_index)\n path = f\"{path}{self.output_ext}\"\n\n if data.shape[0] == 1:\n data = data.squeeze(0)\n elif 2 < data.shape[0] < 5:\n data = np.moveaxis(np.asarray(data), 0, -1)\n else:\n raise ValueError(f\"Unsupported number of channels: {data.shape[0]}, available options are [1, 3, 4]\")\n\n write_png(\n np.asarray(data),\n file_name=path,\n output_spatial_shape=spatial_shape,\n mode=self.mode,\n scale=self.scale,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/png_saver.py_PNGSaver.save_batch_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/png_saver.py_PNGSaver.save_batch_", "embedding": null, "metadata": {"file_path": "monai/data/png_saver.py", "file_name": "png_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 125, "end_line": 135, "span_ids": ["PNGSaver.save_batch"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PNGSaver:\n\n def save_batch(self, batch_data: 
Union[torch.Tensor, np.ndarray], meta_data: Optional[Dict] = None) -> None:\n \"\"\"Save a batch of data into png format files.\n\n Args:\n batch_data: target batch data content that save into png format.\n meta_data: every key-value in the meta_data is corresponding to a batch of data.\n\n \"\"\"\n for i, data in enumerate(batch_data): # save a batch of files\n self.save(data=data, meta_data={k: meta_data[k][i] for k in meta_data} if meta_data is not None else None)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/png_writer.py_from_typing_import_Option_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/png_writer.py_from_typing_import_Option_", "embedding": null, "metadata": {"file_path": "monai/data/png_writer.py", "file_name": "png_writer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 86, "span_ids": ["write_png", "docstring"], "tokens": 861}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Optional, Sequence, Union\n\nimport numpy as np\n\nfrom monai.transforms.spatial.array import Resize\nfrom monai.utils import InterpolateMode, ensure_tuple_rep, optional_import\n\nImage, _ = optional_import(\"PIL\", name=\"Image\")\n\n\ndef write_png(\n data: np.ndarray,\n file_name: str,\n output_spatial_shape: Optional[Sequence[int]] = None,\n mode: Union[InterpolateMode, str] = InterpolateMode.BICUBIC,\n scale: Optional[int] = None,\n) -> None:\n \"\"\"\n Write numpy data into png files to disk.\n Spatially it supports HW for 2D.(H,W) or (H,W,3) or (H,W,4).\n If `scale` is None, expect the input data in `np.uint8` or `np.uint16` type.\n It's based on the Image module in PIL library:\n https://pillow.readthedocs.io/en/stable/reference/Image.html\n\n Args:\n data: input data to write to file.\n file_name: expected file name that saved on disk.\n output_spatial_shape: spatial shape of the output image.\n mode: {``\"nearest\"``, ``\"linear\"``, ``\"bilinear\"``, ``\"bicubic\"``, ``\"trilinear\"``, ``\"area\"``}\n The interpolation mode. Defaults to ``\"bicubic\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#interpolate\n scale: {``255``, ``65535``} postprocess data by clipping to [0, 1] and scaling to\n [0, 255] (uint8) or [0, 65535] (uint16). 
Default is None to disable scaling.\n\n Raises:\n ValueError: When ``scale`` is not one of [255, 65535].\n\n \"\"\"\n if not isinstance(data, np.ndarray):\n raise AssertionError(\"input data must be numpy array.\")\n if len(data.shape) == 3 and data.shape[2] == 1: # PIL Image can't save image with 1 channel\n data = data.squeeze(2)\n if output_spatial_shape is not None:\n output_spatial_shape_ = ensure_tuple_rep(output_spatial_shape, 2)\n mode = InterpolateMode(mode)\n align_corners = None if mode in (InterpolateMode.NEAREST, InterpolateMode.AREA) else False\n xform = Resize(spatial_size=output_spatial_shape_, mode=mode, align_corners=align_corners)\n _min, _max = np.min(data), np.max(data)\n if len(data.shape) == 3:\n data = np.moveaxis(data, -1, 0) # to channel first\n data = xform(data)\n data = np.moveaxis(data, 0, -1)\n else: # (H, W)\n data = np.expand_dims(data, 0) # make a channel\n data = xform(data)[0] # first channel\n if mode != InterpolateMode.NEAREST:\n data = np.clip(data, _min, _max) # type: ignore\n\n if scale is not None:\n data = np.clip(data, 0.0, 1.0) # type: ignore # png writer only can scale data in range [0, 1]\n if scale == np.iinfo(np.uint8).max:\n data = (scale * data).astype(np.uint8)\n elif scale == np.iinfo(np.uint16).max:\n data = (scale * data).astype(np.uint16)\n else:\n raise ValueError(f\"Unsupported scale: {scale}, available options are [255, 65535]\")\n\n # PNG data must be int number\n if data.dtype not in (np.uint8, np.uint16): # type: ignore\n data = data.astype(np.uint8)\n\n img = Image.fromarray(data)\n img.save(file_name, \"PNG\")\n return", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/synthetic.py_from_typing_import_Option_create_test_image_2d.return.noisyimage_labels": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/synthetic.py_from_typing_import_Option_create_test_image_2d.return.noisyimage_labels", "embedding": null, "metadata": {"file_path": "monai/data/synthetic.py", "file_name": "synthetic.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 80, "span_ids": ["create_test_image_2d", "docstring"], "tokens": 717}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Optional, Tuple\n\nimport numpy as np\n\nfrom monai.transforms.utils import rescale_array\n\n__all__ = [\"create_test_image_2d\", \"create_test_image_3d\"]\n\n\ndef create_test_image_2d(\n width: int,\n height: int,\n num_objs: int = 12,\n rad_max: int = 30,\n noise_max: float = 0.0,\n num_seg_classes: int = 5,\n channel_dim: Optional[int] = None,\n random_state: Optional[np.random.RandomState] = None,\n) -> Tuple[np.ndarray, np.ndarray]:\n \"\"\"\n Return a noisy 2D image with `num_objs` circles and a 2D mask image. The maximum radius of the circles is given as\n `rad_max`. 
The mask will have `num_seg_classes` number of classes for segmentations labeled sequentially from 1, plus a\n background class represented as 0. If `noise_max` is greater than 0 then noise will be added to the image taken from\n the uniform distribution on range `[0,noise_max)`. If `channel_dim` is None, will create an image without channel\n dimension, otherwise create an image with channel dimension as first dim or last dim.\n\n Args:\n width: width of the image.\n height: height of the image.\n num_objs: number of circles to generate. Defaults to `12`.\n rad_max: maximum circle radius. Defaults to `30`.\n noise_max: if greater than 0 then noise will be added to the image taken from\n the uniform distribution on range `[0,noise_max)`. Defaults to `0`.\n num_seg_classes: number of classes for segmentations. Defaults to `5`.\n channel_dim: if None, create an image without channel dimension, otherwise create\n an image with channel dimension as first dim or last dim. Defaults to `None`.\n random_state: the random generator to use. Defaults to `np.random`.\n \"\"\"\n image = np.zeros((width, height))\n rs = np.random if random_state is None else random_state\n\n for _ in range(num_objs):\n x = rs.randint(rad_max, width - rad_max)\n y = rs.randint(rad_max, height - rad_max)\n rad = rs.randint(5, rad_max)\n spy, spx = np.ogrid[-x : width - x, -y : height - y]\n circle = (spx * spx + spy * spy) <= rad * rad\n\n if num_seg_classes > 1:\n image[circle] = np.ceil(rs.random() * num_seg_classes)\n else:\n image[circle] = rs.random() * 0.5 + 0.5\n\n labels = np.ceil(image).astype(np.int32)\n\n norm = rs.uniform(0, num_seg_classes * noise_max, size=image.shape)\n noisyimage = rescale_array(np.maximum(image, norm))\n\n if channel_dim is not None:\n if not (isinstance(channel_dim, int) and channel_dim in (-1, 0, 2)):\n raise AssertionError(\"invalid channel dim.\")\n if channel_dim == 0:\n noisyimage = noisyimage[None]\n labels = labels[None]\n else:\n noisyimage = noisyimage[..., None]\n labels = labels[..., None]\n\n return noisyimage, labels", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/synthetic.py_create_test_image_3d_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/synthetic.py_create_test_image_3d_", "embedding": null, "metadata": {"file_path": "monai/data/synthetic.py", "file_name": "synthetic.py", "file_type": "text/x-python", "category": "implementation", "start_line": 83, "end_line": 142, "span_ids": ["create_test_image_3d"], "tokens": 607}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def create_test_image_3d(\n height: int,\n width: int,\n depth: int,\n num_objs: int = 12,\n rad_max: int = 30,\n noise_max: float = 0.0,\n num_seg_classes: int = 5,\n channel_dim: Optional[int] = None,\n random_state: Optional[np.random.RandomState] = None,\n) -> Tuple[np.ndarray, np.ndarray]:\n \"\"\"\n Return a noisy 3D image and segmentation.\n\n Args:\n height: height of the image.\n 
width: width of the image.\n depth: depth of the image.\n num_objs: number of circles to generate. Defaults to `12`.\n rad_max: maximum circle radius. Defaults to `30`.\n noise_max: if greater than 0 then noise will be added to the image taken from\n the uniform distribution on range `[0,noise_max)`. Defaults to `0`.\n num_seg_classes: number of classes for segmentations. Defaults to `5`.\n channel_dim: if None, create an image without channel dimension, otherwise create\n an image with channel dimension as first dim or last dim. Defaults to `None`.\n random_state: the random generator to use. Defaults to `np.random`.\n\n See also:\n :py:meth:`~create_test_image_2d`\n \"\"\"\n image = np.zeros((width, height, depth))\n rs = np.random if random_state is None else random_state\n\n for _ in range(num_objs):\n x = rs.randint(rad_max, width - rad_max)\n y = rs.randint(rad_max, height - rad_max)\n z = rs.randint(rad_max, depth - rad_max)\n rad = rs.randint(5, rad_max)\n spy, spx, spz = np.ogrid[-x : width - x, -y : height - y, -z : depth - z]\n circle = (spx * spx + spy * spy + spz * spz) <= rad * rad\n\n if num_seg_classes > 1:\n image[circle] = np.ceil(rs.random() * num_seg_classes)\n else:\n image[circle] = rs.random() * 0.5 + 0.5\n\n labels = np.ceil(image).astype(np.int32)\n\n norm = rs.uniform(0, num_seg_classes * noise_max, size=image.shape)\n noisyimage = rescale_array(np.maximum(image, norm))\n\n if channel_dim is not None:\n if not (isinstance(channel_dim, int) and channel_dim in (-1, 0, 3)):\n raise AssertionError(\"invalid channel dim.\")\n noisyimage, labels = (\n (noisyimage[None], labels[None]) if channel_dim == 0 else (noisyimage[..., None], labels[..., None])\n )\n\n return noisyimage, labels", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_iter_patch_slices_iter_patch_slices.for_position_in_product_.yield_tuple_slice_s_s_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_iter_patch_slices_iter_patch_slices.for_position_in_product_.yield_tuple_slice_s_s_", "embedding": null, "metadata": {"file_path": "monai/data/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 53, "end_line": 80, "span_ids": ["iter_patch_slices"], "tokens": 313}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def iter_patch_slices(\n dims: Sequence[int], patch_size: Union[Sequence[int], int], start_pos: Sequence[int] = ()\n) -> Generator[Tuple[slice, ...], None, None]:\n \"\"\"\n Yield successive tuples of slices defining patches of size `patch_size` from an array of dimensions `dims`. The\n iteration starts from position `start_pos` in the array, or starting at the origin if this isn't provided. 
Each\n patch is chosen in a contiguous grid using a first dimension as least significant ordering.\n\n Args:\n dims: dimensions of array to iterate over\n patch_size: size of patches to generate slices for, 0 or None selects whole dimension\n start_pos: starting position in the array, default is 0 for each dimension\n\n Yields:\n Tuples of slice objects defining each patch\n \"\"\"\n\n # ensure patchSize and startPos are the right length\n ndim = len(dims)\n patch_size_ = get_valid_patch_size(dims, patch_size)\n start_pos = ensure_tuple_size(start_pos, ndim)\n\n # collect the ranges to step over each dimension\n ranges = tuple(starmap(range, zip(start_pos, dims, patch_size_)))\n\n # choose patches by applying product to the ranges\n for position in product(*ranges[::-1]): # reverse ranges order to iterate in index order\n yield tuple(slice(s, s + p) for s, p in zip(position[::-1], patch_size_))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_iter_patch_iter_patch.if_copy_back_.arr_arrpad_slices_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_iter_patch_iter_patch.if_copy_back_.arr_arrpad_slices_", "embedding": null, "metadata": {"file_path": "monai/data/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 169, "end_line": 228, "span_ids": ["iter_patch"], "tokens": 746}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def iter_patch(\n arr: np.ndarray,\n patch_size: Union[Sequence[int], int] = 0,\n start_pos: Sequence[int] = (),\n copy_back: bool = True,\n mode: Union[NumpyPadMode, str] = NumpyPadMode.WRAP,\n **pad_opts: Dict,\n):\n \"\"\"\n Yield successive patches from `arr` of size `patch_size`. The iteration can start from position `start_pos` in `arr`\n but drawing from a padded array extended by the `patch_size` in each dimension (so these coordinates can be negative\n to start in the padded region). If `copy_back` is True the values from each patch are written back to `arr`.\n\n Args:\n arr: array to iterate over\n patch_size: size of patches to generate slices for, 0 or None selects whole dimension\n start_pos: starting position in the array, default is 0 for each dimension\n copy_back: if True data from the yielded patches is copied back to `arr` once the generator completes\n mode: {``\"constant\"``, ``\"edge\"``, ``\"linear_ramp\"``, ``\"maximum\"``, ``\"mean\"``,\n ``\"median\"``, ``\"minimum\"``, ``\"reflect\"``, ``\"symmetric\"``, ``\"wrap\"``, ``\"empty\"``}\n One of the listed string values or a user supplied function. 
Defaults to ``\"wrap\"``.\n See also: https://numpy.org/doc/1.18/reference/generated/numpy.pad.html\n pad_opts: padding options, see `numpy.pad`\n\n Yields:\n Patches of array data from `arr` which are views into a padded array which can be modified, if `copy_back` is\n True these changes will be reflected in `arr` once the iteration completes.\n\n Note:\n coordinate format is:\n\n [1st_dim_start, 1st_dim_end,\n 2nd_dim_start, 2nd_dim_end,\n ...,\n Nth_dim_start, Nth_dim_end]]\n\n \"\"\"\n # ensure patchSize and startPos are the right length\n patch_size_ = get_valid_patch_size(arr.shape, patch_size)\n start_pos = ensure_tuple_size(start_pos, arr.ndim)\n\n # pad image by maximum values needed to ensure patches are taken from inside an image\n arrpad = np.pad(arr, tuple((p, p) for p in patch_size_), NumpyPadMode(mode).value, **pad_opts)\n\n # choose a start position in the padded image\n start_pos_padded = tuple(s + p for s, p in zip(start_pos, patch_size_))\n\n # choose a size to iterate over which is smaller than the actual padded image to prevent producing\n # patches which are only in the padded regions\n iter_size = tuple(s + p for s, p in zip(arr.shape, patch_size_))\n\n for slices in iter_patch_slices(iter_size, patch_size_, start_pos_padded):\n # compensate original image padding\n coords_no_pad = tuple((coord.start - p, coord.stop - p) for coord, p in zip(slices, patch_size_))\n yield arrpad[slices], np.asarray(coords_no_pad) # data and coords (in numpy; works with torch loader)\n\n # copy back data from the padded image if required\n if copy_back:\n slices = tuple(slice(p, p + s) for p, s in zip(patch_size_, arr.shape))\n arr[...] = arrpad[slices]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_get_valid_patch_size_get_valid_patch_size.return.tuple_min_ms_ps_or_ms_f": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_get_valid_patch_size_get_valid_patch_size.return.tuple_min_ms_ps_or_ms_f", "embedding": null, "metadata": {"file_path": "monai/data/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 198, "end_line": 209, "span_ids": ["get_valid_patch_size"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_valid_patch_size(image_size: Sequence[int], patch_size: Union[Sequence[int], int]) -> Tuple[int, ...]:\n \"\"\"\n Given an image of dimensions `image_size`, return a patch size tuple taking the dimension from `patch_size` if this is\n not 0/None. Otherwise, or if `patch_size` is shorter than `image_size`, the dimension from `image_size` is taken. This ensures\n the returned patch size is within the bounds of `image_size`. 
If `patch_size` is a single number this is interpreted as a\n patch of the same dimensionality of `image_size` with that size in each dimension.\n \"\"\"\n ndim = len(image_size)\n patch_size_ = ensure_tuple_size(patch_size, ndim)\n\n # ensure patch size dimensions are not larger than image dimension, if a dimension is None or 0 use whole dimension\n return tuple(min(ms, ps or ms) for ms, ps in zip(image_size, patch_size_))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_correct_nifti_header_if_necessary_correct_nifti_header_if_necessary.return.img_nii": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_correct_nifti_header_if_necessary_correct_nifti_header_if_necessary.return.img_nii", "embedding": null, "metadata": {"file_path": "monai/data/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 238, "end_line": 256, "span_ids": ["correct_nifti_header_if_necessary"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def correct_nifti_header_if_necessary(img_nii):\n \"\"\"\n Check nifti object header's format, update the header if needed.\n In the updated image pixdim matches the affine.\n\n Args:\n img_nii: nifti image object\n \"\"\"\n dim = img_nii.header[\"dim\"][0]\n if dim >= 5:\n return img_nii # do nothing for high-dimensional array\n # check that affine matches zooms\n pixdim = np.asarray(img_nii.header.get_zooms())[:dim]\n norm_affine = np.sqrt(np.sum(np.square(img_nii.affine[:dim, :dim]), 0))\n if np.allclose(pixdim, norm_affine):\n return img_nii\n if hasattr(img_nii, \"get_sform\"):\n return rectify_header_sform_qform(img_nii)\n return img_nii", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_rectify_header_sform_qform_rectify_header_sform_qform.return.img_nii": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_rectify_header_sform_qform_rectify_header_sform_qform.return.img_nii", "embedding": null, "metadata": {"file_path": "monai/data/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 259, "end_line": 294, "span_ids": ["rectify_header_sform_qform"], "tokens": 388}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def rectify_header_sform_qform(img_nii):\n \"\"\"\n Look at the sform and qform of the 
nifti object and correct it if any\n incompatibilities with pixel dimensions\n\n Adapted from https://github.com/NifTK/NiftyNet/blob/v0.6.0/niftynet/io/misc_io.py\n\n Args:\n img_nii: nifti image object\n \"\"\"\n d = img_nii.header[\"dim\"][0]\n pixdim = np.asarray(img_nii.header.get_zooms())[:d]\n sform, qform = img_nii.get_sform(), img_nii.get_qform()\n norm_sform = np.sqrt(np.sum(np.square(sform[:d, :d]), 0))\n norm_qform = np.sqrt(np.sum(np.square(qform[:d, :d]), 0))\n sform_mismatch = not np.allclose(norm_sform, pixdim)\n qform_mismatch = not np.allclose(norm_qform, pixdim)\n\n if img_nii.header[\"sform_code\"] != 0:\n if not sform_mismatch:\n return img_nii\n if not qform_mismatch:\n img_nii.set_sform(img_nii.get_qform())\n return img_nii\n if img_nii.header[\"qform_code\"] != 0:\n if not qform_mismatch:\n return img_nii\n if not sform_mismatch:\n img_nii.set_qform(img_nii.get_sform())\n return img_nii\n\n norm = np.sqrt(np.sum(np.square(img_nii.affine[:d, :d]), 0))\n warnings.warn(f\"Modifying image pixdim from {pixdim} to {norm}\")\n\n img_nii.header.set_zooms(norm)\n return img_nii", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_zoom_affine_zoom_affine.return.new_affine": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_zoom_affine_zoom_affine.return.new_affine", "embedding": null, "metadata": {"file_path": "monai/data/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 478, "end_line": 524, "span_ids": ["zoom_affine"], "tokens": 527}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def zoom_affine(affine: np.ndarray, scale: Sequence[float], diagonal: bool = True):\n \"\"\"\n To make column norm of `affine` the same as `scale`. If diagonal is False,\n returns an affine that combines orthogonal rotation and the new scale.\n This is done by first decomposing `affine`, then setting the zoom factors to\n `scale`, and composing a new affine; the shearing factors are removed. If\n diagonal is True, returns a diagonal matrix, the scaling factors are set\n to the diagonal elements. 
This function always return an affine with zero\n translations.\n\n Args:\n affine (nxn matrix): a square matrix.\n scale: new scaling factor along each dimension.\n diagonal: whether to return a diagonal scaling matrix.\n Defaults to True.\n\n Raises:\n ValueError: When ``affine`` is not a square matrix.\n ValueError: When ``scale`` contains a nonpositive scalar.\n\n Returns:\n the updated `n x n` affine.\n\n \"\"\"\n\n affine = np.array(affine, dtype=float, copy=True)\n if len(affine) != len(affine[0]):\n raise ValueError(f\"affine must be n x n, got {len(affine)} x {len(affine[0])}.\")\n scale_np = np.array(scale, dtype=float, copy=True)\n if np.any(scale_np <= 0):\n raise ValueError(\"scale must contain only positive numbers.\")\n d = len(affine) - 1\n if len(scale_np) < d: # defaults based on affine\n norm = np.sqrt(np.sum(np.square(affine), 0))[:-1]\n scale_np = np.append(scale_np, norm[len(scale_np) :])\n scale_np = scale_np[:d]\n scale_np[scale_np == 0] = 1.0\n if diagonal:\n return np.diag(np.append(scale_np, [1.0]))\n rzs = affine[:-1, :-1] # rotation zoom scale\n zs = np.linalg.cholesky(rzs.T @ rzs).T\n rotation = rzs @ np.linalg.inv(zs)\n s = np.sign(np.diag(zs)) * np.abs(scale_np)\n # construct new affine with rotation and zoom\n new_affine = np.eye(len(affine))\n new_affine[:-1, :-1] = rotation @ np.diag(s)\n return new_affine", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_compute_shape_offset_compute_shape_offset.return.out_shape_astype_int_of": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_compute_shape_offset_compute_shape_offset.return.out_shape_astype_int_of", "embedding": null, "metadata": {"file_path": "monai/data/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 527, "end_line": 560, "span_ids": ["compute_shape_offset"], "tokens": 406}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def compute_shape_offset(\n spatial_shape: Union[np.ndarray, Sequence[int]], in_affine: np.ndarray, out_affine: np.ndarray\n) -> Tuple[np.ndarray, np.ndarray]:\n \"\"\"\n Given input and output affine, compute appropriate shapes\n in the output space based on the input array's shape.\n This function also returns the offset to put the shape\n in a good position with respect to the world coordinate system.\n\n Args:\n spatial_shape: input array's shape\n in_affine (matrix): 2D affine matrix\n out_affine (matrix): 2D affine matrix\n \"\"\"\n shape = np.array(spatial_shape, copy=True, dtype=float)\n sr = len(shape)\n in_affine = to_affine_nd(sr, in_affine)\n out_affine = to_affine_nd(sr, out_affine)\n in_coords = [(0.0, dim - 1.0) for dim in shape]\n corners = np.asarray(np.meshgrid(*in_coords, indexing=\"ij\")).reshape((len(shape), -1))\n corners = np.concatenate((corners, np.ones_like(corners[:1])))\n corners = in_affine @ corners\n corners_out = np.linalg.inv(out_affine) @ corners\n corners_out = 
corners_out[:-1] / corners_out[-1]\n out_shape = np.round(corners_out.ptp(axis=1) + 1.0)\n if np.allclose(nib.io_orientation(in_affine), nib.io_orientation(out_affine)):\n # same orientation, get translate from the origin\n offset = in_affine @ ([0] * sr + [1])\n offset = offset[:-1] / offset[-1]\n else:\n # different orientation, the min is the origin\n corners = corners[:-1] / corners[-1]\n offset = np.min(corners, 1)\n return out_shape.astype(int), offset", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_to_affine_nd_to_affine_nd.return.new_affine": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_to_affine_nd_to_affine_nd.return.new_affine", "embedding": null, "metadata": {"file_path": "monai/data/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 382, "end_line": 422, "span_ids": ["to_affine_nd"], "tokens": 501}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def to_affine_nd(r: Union[np.ndarray, int], affine: np.ndarray) -> np.ndarray:\n \"\"\"\n Using elements from affine, to create a new affine matrix by\n assigning the rotation/zoom/scaling matrix and the translation vector.\n\n when ``r`` is an integer, output is an (r+1)x(r+1) matrix,\n where the top left kxk elements are copied from ``affine``,\n the last column of the output affine is copied from ``affine``'s last column.\n `k` is determined by `min(r, len(affine) - 1)`.\n\n when ``r`` is an affine matrix, the output has the same as ``r``,\n the top left kxk elements are copied from ``affine``,\n the last column of the output affine is copied from ``affine``'s last column.\n `k` is determined by `min(len(r) - 1, len(affine) - 1)`.\n\n Args:\n r (int or matrix): number of spatial dimensions or an output affine to be filled.\n affine (matrix): 2D affine matrix\n\n Raises:\n ValueError: When ``affine`` dimensions is not 2.\n ValueError: When ``r`` is nonpositive.\n\n Returns:\n an (r+1) x (r+1) matrix\n\n \"\"\"\n affine_np = np.array(affine, dtype=np.float64)\n if affine_np.ndim != 2:\n raise ValueError(f\"affine must have 2 dimensions, got {affine_np.ndim}.\")\n new_affine = np.array(r, dtype=np.float64, copy=True)\n if new_affine.ndim == 0:\n sr = new_affine.astype(int)\n if not np.isfinite(sr) or sr < 0:\n raise ValueError(f\"r must be positive, got {sr}.\")\n new_affine = np.eye(sr + 1, dtype=np.float64)\n d = max(min(len(new_affine) - 1, len(affine_np) - 1), 1)\n new_affine[:d, :d] = affine_np[:d, :d]\n if d > 1:\n new_affine[:d, -1] = affine_np[:d, -1]\n return new_affine", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/evaluator.py_EnsembleEvaluator._iteration_": {"__data__": 
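The chunks above index `zoom_affine`, `compute_shape_offset` and `to_affine_nd` from `monai/data/utils.py`. A minimal sketch of how these helpers compose is below; the concrete affine and array shape are made up for illustration, and `compute_shape_offset` additionally assumes `nibabel` is available, since it calls `nib.io_orientation`.

```python
import numpy as np

from monai.data.utils import compute_shape_offset, to_affine_nd, zoom_affine

# A 3D affine with 3 mm / 2 mm / 1 mm column norms and a non-zero translation.
affine = np.array(
    [
        [0.0, -2.0, 0.0, 10.0],
        [3.0, 0.0, 0.0, -5.0],
        [0.0, 0.0, 1.0, 0.0],
        [0.0, 0.0, 0.0, 1.0],
    ]
)

# Promote a 2D (3x3) affine to a 3D (4x4) one; the added dimension is identity.
print(to_affine_nd(3, np.eye(3)))

# Rescale the column norms to 1 mm isotropic while keeping the rotation;
# the returned affine has zero translation, as documented above.
iso = zoom_affine(affine, (1.0, 1.0, 1.0), diagonal=False)
print(np.sqrt(np.sum(iso[:3, :3] ** 2, axis=0)))  # ~[1. 1. 1.]

# Output shape and world offset for resampling a (10, 20, 30) array
# from `affine` into the isotropic `iso` space.
out_shape, offset = compute_shape_offset((10, 20, 30), affine, iso)
print(out_shape, offset)
```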
{"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/evaluator.py_EnsembleEvaluator._iteration_", "embedding": null, "metadata": {"file_path": "monai/engines/evaluator.py", "file_name": "evaluator.py", "file_type": "text/x-python", "category": "implementation", "start_line": 318, "end_line": 362, "span_ids": ["EnsembleEvaluator._iteration"], "tokens": 415}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class EnsembleEvaluator(Evaluator):\n\n def _iteration(self, engine: Engine, batchdata: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:\n \"\"\"\n callback function for the Supervised Evaluation processing logic of 1 iteration in Ignite Engine.\n Return below items in a dictionary:\n - IMAGE: image Tensor data for model input, already moved to device.\n - LABEL: label Tensor data corresponding to the image, already moved to device.\n - pred_keys[0]: prediction result of network 0.\n - pred_keys[1]: prediction result of network 1.\n - ... ...\n - pred_keys[N]: prediction result of network N.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n batchdata: input data for this iteration, usually can be dictionary or tuple of Tensor data.\n\n Raises:\n ValueError: When ``batchdata`` is None.\n\n \"\"\"\n if batchdata is None:\n raise ValueError(\"Must provide batch data for current iteration.\")\n batch = self.prepare_batch(batchdata, engine.state.device, engine.non_blocking)\n if len(batch) == 2:\n inputs, targets = batch\n args: Tuple = ()\n kwargs: Dict = {}\n else:\n inputs, targets, args, kwargs = batch\n\n # put iteration outputs into engine.state\n engine.state.output = {Keys.IMAGE: inputs, Keys.LABEL: targets}\n for idx, network in enumerate(self.networks):\n with self.mode(network):\n if self.amp:\n with torch.cuda.amp.autocast():\n engine.state.output.update(\n {self.pred_keys[idx]: self.inferer(inputs, network, *args, **kwargs)}\n )\n else:\n engine.state.output.update({self.pred_keys[idx]: self.inferer(inputs, network, *args, **kwargs)})\n engine.fire_event(IterationEvents.FORWARD_COMPLETED)\n engine.fire_event(IterationEvents.MODEL_COMPLETED)\n\n return engine.state.output", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/multi_gpu_supervised_trainer.py_create_multigpu_supervised_trainer_create_multigpu_supervised_trainer.return.create_supervised_trainer": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/multi_gpu_supervised_trainer.py_create_multigpu_supervised_trainer_create_multigpu_supervised_trainer.return.create_supervised_trainer", "embedding": null, "metadata": {"file_path": "monai/engines/multi_gpu_supervised_trainer.py", "file_name": "multi_gpu_supervised_trainer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 43, "end_line": 89, "span_ids": ["create_multigpu_supervised_trainer"], "tokens": 437}, "excluded_embed_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def create_multigpu_supervised_trainer(\n net: torch.nn.Module,\n optimizer: Optimizer,\n loss_fn: Callable,\n devices: Optional[Sequence[torch.device]] = None,\n non_blocking: bool = False,\n prepare_batch: Callable = _prepare_batch,\n output_transform: Callable = _default_transform,\n distributed: bool = False,\n) -> Engine:\n \"\"\"\n Derived from `create_supervised_trainer` in Ignite.\n\n Factory function for creating a trainer for supervised models.\n\n Args:\n net: the network to train.\n optimizer: the optimizer to use.\n loss_fn: the loss function to use.\n devices: device(s) type specification (default: None).\n Applies to both model and batches. None is all devices used, empty list is CPU only.\n non_blocking: if True and this copy is between CPU and GPU, the copy may occur asynchronously\n with respect to the host. For other cases, this argument has no effect.\n prepare_batch: function that receives `batch`, `device`, `non_blocking` and outputs\n tuple of tensors `(batch_x, batch_y)`.\n output_transform: function that receives 'x', 'y', 'y_pred', 'loss' and returns value\n to be assigned to engine's state.output after each iteration. Default is returning `loss.item()`.\n distributed: whether convert model to `DistributedDataParallel`, if have multiple devices, use\n the first device as output device.\n\n Returns:\n Engine: a trainer engine with supervised update function.\n\n Note:\n `engine.state.output` for this engine is defined by `output_transform` parameter and is the loss\n of the processed batch by default.\n \"\"\"\n\n devices_ = get_devices_spec(devices)\n if distributed:\n net = DistributedDataParallel(net, device_ids=devices_)\n elif len(devices_) > 1:\n net = DataParallel(net)\n\n return create_supervised_trainer(\n net, optimizer, loss_fn, devices_[0], non_blocking, prepare_batch, output_transform\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/multi_gpu_supervised_trainer.py_create_multigpu_supervised_evaluator_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/multi_gpu_supervised_trainer.py_create_multigpu_supervised_evaluator_", "embedding": null, "metadata": {"file_path": "monai/engines/multi_gpu_supervised_trainer.py", "file_name": "multi_gpu_supervised_trainer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 92, "end_line": 137, "span_ids": ["create_multigpu_supervised_evaluator"], "tokens": 450}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def create_multigpu_supervised_evaluator(\n net: torch.nn.Module,\n metrics: Optional[Dict[str, Metric]] = None,\n devices: Optional[Sequence[torch.device]] = None,\n 
non_blocking: bool = False,\n prepare_batch: Callable = _prepare_batch,\n output_transform: Callable = _default_eval_transform,\n distributed: bool = False,\n) -> Engine:\n \"\"\"\n Derived from `create_supervised_evaluator` in Ignite.\n\n Factory function for creating an evaluator for supervised models.\n\n Args:\n net: the model to train.\n metrics: a map of metric names to Metrics.\n devices: device(s) type specification (default: None).\n Applies to both model and batches. None is all devices used, empty list is CPU only.\n non_blocking: if True and this copy is between CPU and GPU, the copy may occur asynchronously\n with respect to the host. For other cases, this argument has no effect.\n prepare_batch: function that receives `batch`, `device`, `non_blocking` and outputs\n tuple of tensors `(batch_x, batch_y)`.\n output_transform: function that receives 'x', 'y', 'y_pred' and returns value\n to be assigned to engine's state.output after each iteration. Default is returning `(y_pred, y,)`\n which fits output expected by metrics. If you change it you should use `output_transform` in metrics.\n distributed: whether convert model to `DistributedDataParallel`, if have multiple devices, use\n the first device as output device.\n\n Note:\n `engine.state.output` for this engine is defined by `output_transform` parameter and is\n a tuple of `(batch_pred, batch_y)` by default.\n\n Returns:\n Engine: an evaluator engine with supervised inference function.\n \"\"\"\n\n devices_ = get_devices_spec(devices)\n\n if distributed:\n net = DistributedDataParallel(net, device_ids=devices_)\n elif len(devices_) > 1:\n net = DataParallel(net)\n\n return create_supervised_evaluator(net, metrics, devices_[0], non_blocking, prepare_batch, output_transform)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_Trainer_Trainer.get_train_stats.return._total_epochs_self_sta": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_Trainer_Trainer.get_train_stats.return._total_epochs_self_sta", "embedding": null, "metadata": {"file_path": "monai/engines/trainer.py", "file_name": "trainer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 33, "end_line": 49, "span_ids": ["Trainer.run", "Trainer.get_train_stats", "Trainer"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Trainer(Workflow):\n \"\"\"\n Base class for all kinds of trainers, inherits from Workflow.\n\n \"\"\"\n\n def run(self) -> None:\n \"\"\"\n Execute training based on Ignite Engine.\n If call this function multiple times, it will continuously run from the previous state.\n\n \"\"\"\n self.scaler = torch.cuda.amp.GradScaler() if self.amp else None\n super().run()\n\n def get_train_stats(self) -> Dict[str, float]:\n return {\"total_epochs\": self.state.max_epochs, \"total_iterations\": self.state.epoch_length}", "start_char_idx": null, "end_char_idx": null, "text_template": 
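Both factory functions above are thin wrappers over Ignite's `create_supervised_trainer` / `create_supervised_evaluator` plus the `DataParallel` / `DistributedDataParallel` wrapping shown. A small CPU-only sketch; the toy network, loss and data are illustrative.

```python
import torch
from ignite.metrics import Loss

from monai.engines.multi_gpu_supervised_trainer import (
    create_multigpu_supervised_evaluator,
    create_multigpu_supervised_trainer,
)

net = torch.nn.Linear(10, 2)
opt = torch.optim.SGD(net.parameters(), lr=0.1)
loss_fn = torch.nn.CrossEntropyLoss()
devices = [torch.device("cpu")]  # single explicit device: no DataParallel wrapping

trainer = create_multigpu_supervised_trainer(net, opt, loss_fn, devices=devices)
evaluator = create_multigpu_supervised_evaluator(
    net, metrics={"val_loss": Loss(loss_fn)}, devices=devices
)

# Toy (x, y) batches; both returned objects are plain Ignite Engines.
data = [(torch.randn(4, 10), torch.randint(0, 2, (4,))) for _ in range(8)]
trainer.run(data, max_epochs=2)  # engine.state.output is loss.item() by default
evaluator.run(data)              # default output_transform feeds (y_pred, y) to the metrics
print(evaluator.state.metrics["val_loss"])
```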
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/workflow.py_Workflow_Workflow._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/workflow.py_Workflow_Workflow._", "embedding": null, "metadata": {"file_path": "monai/engines/workflow.py", "file_name": "workflow.py", "file_type": "text/x-python", "category": "implementation", "start_line": 36, "end_line": 76, "span_ids": ["Workflow"], "tokens": 597}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Workflow(IgniteEngine): # type: ignore[valid-type, misc] # due to optional_import\n \"\"\"\n Workflow defines the core work process inheriting from Ignite engine.\n All trainer, validator and evaluator share this same workflow as base class,\n because they all can be treated as same Ignite engine loops.\n It initializes all the sharable data in Ignite engine.state.\n And attach additional processing logics to Ignite engine based on Event-Handler mechanism.\n\n Users should consider to inherit from `trainer` or `evaluator` to develop more trainers or evaluators.\n\n Args:\n device: an object representing the device on which to run.\n max_epochs: the total epoch number for engine to run, validator and evaluator have only 1 epoch.\n data_loader: Ignite engine use data_loader to run, must be Iterable or torch.DataLoader.\n epoch_length: number of iterations for one epoch, default to `len(data_loader)`.\n non_blocking: if True and this copy is between CPU and GPU, the copy may occur asynchronously\n with respect to the host. For other cases, this argument has no effect.\n prepare_batch: function to parse image and label for every iteration.\n iteration_update: the callable function for every iteration, expect to accept `engine`\n and `batchdata` as input parameters. if not provided, use `self._iteration()` instead.\n post_transform: execute additional transformation for the model output data.\n Typically, several Tensor based transforms composed by `Compose`.\n key_metric: compute metric when every iteration completed, and save average value to\n engine.state.metrics when epoch completed. 
key_metric is the main metric to compare and save the\n checkpoint into files.\n additional_metrics: more Ignite metrics that also attach to Ignite Engine.\n handlers: every handler is a set of Ignite Event-Handlers, must have `attach` function, like:\n CheckpointHandler, StatsHandler, SegmentationSaver, etc.\n amp: whether to enable auto-mixed-precision training or inference, default is False.\n event_names: additional custom ignite events that will register to the engine.\n new events can be a list of str or `ignite.engine.events.EventEnum`.\n event_to_attr: a dictionary to map an event to a state attribute, then add to `engine.state`.\n for more details, check: https://github.com/pytorch/ignite/blob/v0.4.4.post1/ignite/engine/engine.py#L160\n\n Raises:\n TypeError: When ``device`` is not a ``torch.Device``.\n TypeError: When ``data_loader`` is not a ``torch.utils.data.DataLoader``.\n TypeError: When ``key_metric`` is not a ``Optional[dict]``.\n TypeError: When ``additional_metrics`` is not a ``Optional[dict]``.\n\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/__init__.py_CheckpointLoader_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/__init__.py_CheckpointLoader_", "embedding": null, "metadata": {"file_path": "monai/handlers/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 40, "span_ids": ["docstring"], "tokens": 256}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from .checkpoint_loader import CheckpointLoader\nfrom .checkpoint_saver import CheckpointSaver\nfrom .classification_saver import ClassificationSaver\nfrom .confusion_matrix import ConfusionMatrix\nfrom .earlystop_handler import EarlyStopHandler\nfrom .garbage_collector import GarbageCollector\nfrom .hausdorff_distance import HausdorffDistance\nfrom .iteration_metric import IterationMetric\nfrom .lr_schedule_handler import LrScheduleHandler\nfrom .mean_dice import MeanDice\nfrom .metric_logger import MetricLogger, MetricLoggerKeys\nfrom .metrics_saver import MetricsSaver\nfrom .parameter_scheduler import ParamSchedulerHandler\nfrom .roc_auc import ROCAUC\nfrom .segmentation_saver import SegmentationSaver\nfrom .smartcache_handler import SmartCacheHandler\nfrom .stats_handler import StatsHandler\nfrom .surface_distance import SurfaceDistance\nfrom .tensorboard_handlers import TensorBoardHandler, TensorBoardImageHandler, TensorBoardStatsHandler\nfrom .transform_inverter import TransformInverter\nfrom .utils import (\n evenly_divisible_all_gather,\n stopping_fn_from_loss,\n stopping_fn_from_metric,\n string_list_all_gather,\n write_metrics_reports,\n)\nfrom .validation_handler import ValidationHandler", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver_CheckpointSaver._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver_CheckpointSaver._", "embedding": null, "metadata": {"file_path": "monai/handlers/checkpoint_saver.py", "file_name": "checkpoint_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 29, "end_line": 79, "span_ids": ["CheckpointSaver"], "tokens": 646}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CheckpointSaver:\n \"\"\"\n CheckpointSaver acts as an Ignite handler to save checkpoint data into files.\n It supports to save according to metrics result, epoch number, iteration number\n and last model or exception.\n\n Args:\n save_dir: the target directory to save the checkpoints.\n save_dict: source objects that save to the checkpoint. examples::\n\n {'network': net, 'optimizer': optimizer, 'lr_scheduler': lr_scheduler}\n\n name: identifier of logging.logger to use, if None, defaulting to ``engine.logger``.\n file_prefix: prefix for the filenames to which objects will be saved.\n save_final: whether to save checkpoint or session at final iteration or exception.\n If checkpoints are to be saved when an exception is raised, put this handler before\n `StatsHandler` in the handler list, because the logic with Ignite can only trigger\n the first attached handler for `EXCEPTION_RAISED` event.\n final_filename: set a fixed filename to save the final model if `save_final=True`.\n If None, default to `checkpoint_final_iteration=N.pt`.\n save_key_metric: whether to save checkpoint or session when the value of key_metric is\n higher than all the previous values during training.keep 4 decimal places of metric,\n checkpoint name is: {file_prefix}_key_metric=0.XXXX.pth.\n key_metric_name: the name of key_metric in ignite metrics dictionary.\n If None, use `engine.state.key_metric` instead.\n key_metric_n_saved: save top N checkpoints or sessions, sorted by the value of key\n metric in descending order.\n key_metric_filename: set a fixed filename to set the best metric model, if not None,\n `key_metric_n_saved` should be 1 and only keep the best metric model.\n key_metric_save_state: whether to save the tracking list of key metric in the checkpoint file.\n if `True`, then will save an object in the checkpoint file with key `checkpointer` to be consistent\n with ignite: https://github.com/pytorch/ignite/blob/master/ignite/handlers/checkpoint.py#L99.\n typically, it's used to resume training and compare current metric with previous N values.\n key_metric_greater_or_equal: if `True`, the latest equally scored model is stored. Otherwise,\n save the the first equally scored model. 
default to `False`.\n epoch_level: save checkpoint during training for every N epochs or every N iterations.\n `True` is epoch level, `False` is iteration level.\n save_interval: save checkpoint every N epochs, default is 0 to save no checkpoint.\n n_saved: save latest N checkpoints of epoch level or iteration level, 'None' is to save all.\n\n Note:\n CheckpointHandler can be used during training, validation or evaluation.\n example of saved files:\n\n - checkpoint_iteration=400.pt\n - checkpoint_iteration=800.pt\n - checkpoint_epoch=1.pt\n - checkpoint_final_iteration=1000.pt\n - checkpoint_key_metric=0.9387.pt\n\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver.attach_CheckpointSaver.attach.if_self__interval_checkpo.if_self_epoch_level_.else_.engine_add_event_handler_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver.attach_CheckpointSaver.attach.if_self__interval_checkpo.if_self_epoch_level_.else_.engine_add_event_handler_", "embedding": null, "metadata": {"file_path": "monai/handlers/checkpoint_saver.py", "file_name": "checkpoint_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 141, "end_line": 157, "span_ids": ["CheckpointSaver.attach"], "tokens": 183}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CheckpointSaver:\n\n def attach(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n if self._name is None:\n self.logger = engine.logger\n if self._final_checkpoint is not None:\n engine.add_event_handler(Events.COMPLETED, self.completed)\n engine.add_event_handler(Events.EXCEPTION_RAISED, self.exception_raised)\n if self._key_metric_checkpoint is not None:\n engine.add_event_handler(Events.EPOCH_COMPLETED, self.metrics_completed)\n if self._interval_checkpoint is not None:\n if self.epoch_level:\n engine.add_event_handler(Events.EPOCH_COMPLETED(every=self.save_interval), self.interval_completed)\n else:\n engine.add_event_handler(Events.ITERATION_COMPLETED(every=self.save_interval), self.interval_completed)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/lr_schedule_handler.py_LrScheduleHandler_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/lr_schedule_handler.py_LrScheduleHandler_", "embedding": null, "metadata": {"file_path": "monai/handlers/lr_schedule_handler.py", "file_name": "lr_schedule_handler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 26, "end_line": 85, "span_ids": 
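Given the constructor arguments and `attach` logic indexed above, a minimal `CheckpointSaver` setup could look like the sketch below; the bare Ignite `Engine` stands in for a real MONAI trainer, and the network and optimizer are placeholders.

```python
import torch
from ignite.engine import Engine

from monai.handlers import CheckpointSaver

net = torch.nn.Linear(10, 2)
opt = torch.optim.SGD(net.parameters(), lr=0.1)

trainer = Engine(lambda engine, batch: None)  # stand-in for a real trainer

saver = CheckpointSaver(
    save_dir="./checkpoints",
    save_dict={"network": net, "optimizer": opt},
    save_final=True,    # also write a final checkpoint on COMPLETED
    epoch_level=True,   # interpret save_interval as "every N epochs"
    save_interval=1,    # one interval checkpoint per epoch
    n_saved=2,          # keep only the two most recent interval checkpoints
)
saver.attach(trainer)

trainer.run(range(8), max_epochs=3)  # e.g. checkpoint_epoch=N.pt files plus the final one
```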
["LrScheduleHandler.__init__", "LrScheduleHandler", "LrScheduleHandler.__call__", "LrScheduleHandler.attach"], "tokens": 510}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LrScheduleHandler:\n \"\"\"\n Ignite handler to update the Learning Rate based on PyTorch LR scheduler.\n \"\"\"\n\n def __init__(\n self,\n lr_scheduler: Union[_LRScheduler, ReduceLROnPlateau],\n print_lr: bool = True,\n name: Optional[str] = None,\n epoch_level: bool = True,\n step_transform: Callable[[Engine], Any] = lambda engine: (),\n ) -> None:\n \"\"\"\n Args:\n lr_scheduler: typically, lr_scheduler should be PyTorch\n lr_scheduler object. If customized version, must have `step` and `get_last_lr` methods.\n print_lr: whether to print out the latest learning rate with logging.\n name: identifier of logging.logger to use, if None, defaulting to ``engine.logger``.\n epoch_level: execute lr_scheduler.step() after every epoch or every iteration.\n `True` is epoch level, `False` is iteration level.\n step_transform: a callable that is used to transform the information from `engine`\n to expected input data of lr_scheduler.step() function if necessary.\n\n Raises:\n TypeError: When ``step_transform`` is not ``callable``.\n\n \"\"\"\n self.lr_scheduler = lr_scheduler\n self.print_lr = print_lr\n self.logger = logging.getLogger(name)\n self.epoch_level = epoch_level\n if not callable(step_transform):\n raise TypeError(f\"step_transform must be callable but is {type(step_transform).__name__}.\")\n self.step_transform = step_transform\n\n self._name = name\n\n def attach(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n if self._name is None:\n self.logger = engine.logger\n if self.epoch_level:\n engine.add_event_handler(Events.EPOCH_COMPLETED, self)\n else:\n engine.add_event_handler(Events.ITERATION_COMPLETED, self)\n\n def __call__(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n args = ensure_tuple(self.step_transform(engine))\n self.lr_scheduler.step(*args)\n if self.print_lr:\n self.logger.info(f\"Current learning rate: {self.lr_scheduler._last_lr[0]}\") # type: ignore[union-attr]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/segmentation_saver.py_SegmentationSaver_SegmentationSaver.__init__.self._name.name": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/segmentation_saver.py_SegmentationSaver_SegmentationSaver.__init__.self._name.name", "embedding": null, "metadata": {"file_path": "monai/handlers/segmentation_saver.py", "file_name": "segmentation_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 29, "end_line": 129, "span_ids": ["SegmentationSaver", "SegmentationSaver.__init__"], "tokens": 1175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SegmentationSaver:\n \"\"\"\n Event handler triggered on completing every iteration to save the segmentation predictions into files.\n It can extract the input image meta data(filename, affine, original_shape, etc.) and resample the predictions\n based on the meta data.\n\n \"\"\"\n\n def __init__(\n self,\n output_dir: str = \"./\",\n output_postfix: str = \"seg\",\n output_ext: str = \".nii.gz\",\n resample: bool = True,\n mode: Union[GridSampleMode, InterpolateMode, str] = \"nearest\",\n padding_mode: Union[GridSamplePadMode, str] = GridSamplePadMode.BORDER,\n scale: Optional[int] = None,\n dtype: DtypeLike = np.float64,\n output_dtype: DtypeLike = np.float32,\n squeeze_end_dims: bool = True,\n data_root_dir: str = \"\",\n batch_transform: Callable = lambda x: x,\n output_transform: Callable = lambda x: x,\n name: Optional[str] = None,\n ) -> None:\n \"\"\"\n Args:\n output_dir: output image directory.\n output_postfix: a string appended to all output file names, default to `seg`.\n output_ext: output file extension name, available extensions: `.nii.gz`, `.nii`, `.png`.\n resample: whether to resample before saving the data array.\n if saving PNG format image, based on the `spatial_shape` from metadata.\n if saving NIfTI format image, based on the `original_affine` from metadata.\n mode: This option is used when ``resample = True``. Defaults to ``\"nearest\"``.\n\n - NIfTI files {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n - PNG files {``\"nearest\"``, ``\"linear\"``, ``\"bilinear\"``, ``\"bicubic\"``, ``\"trilinear\"``, ``\"area\"``}\n The interpolation mode.\n See also: https://pytorch.org/docs/stable/nn.functional.html#interpolate\n\n padding_mode: This option is used when ``resample = True``. Defaults to ``\"border\"``.\n\n - NIfTI files {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n - PNG files\n This option is ignored.\n\n scale: {``255``, ``65535``} postprocess data by clipping to [0, 1] and scaling\n [0, 255] (uint8) or [0, 65535] (uint16). Default is None to disable scaling.\n It's used for PNG format only.\n dtype: data type for resampling computation. Defaults to ``np.float64`` for best precision.\n If None, use the data type of input data.\n It's used for Nifti format only.\n output_dtype: data type for saving data. Defaults to ``np.float32``, it's used for Nifti format only.\n squeeze_end_dims: if True, any trailing singleton dimensions will be removed (after the channel\n has been moved to the end). So if input is (C,H,W,D), this will be altered to (H,W,D,C), and\n then if C==1, it will be saved as (H,W,D). If D also ==1, it will be saved as (H,W). If false,\n image will always be saved as (H,W,D,C).\n it's used for NIfTI format only.\n data_root_dir: if not empty, it specifies the beginning parts of the input file's\n absolute path. it's used to compute `input_file_rel_path`, the relative path to the file from\n `data_root_dir` to preserve folder structure when saving in case there are files in different\n folders with the same file names. 
for example:\n input_file_name: /foo/bar/test1/image.nii,\n output_postfix: seg\n output_ext: nii.gz\n output_dir: /output,\n data_root_dir: /foo/bar,\n output will be: /output/test1/image/image_seg.nii.gz\n batch_transform: a callable that is used to transform the\n ignite.engine.batch into expected format to extract the meta_data dictionary.\n it can be used to extract the input image meta data: filename, affine, original_shape, etc.\n output_transform: a callable that is used to transform the\n ignite.engine.output into the form expected image data.\n The first dimension of this transform's output will be treated as the\n batch dimension. Each item in the batch will be saved individually.\n name: identifier of logging.logger to use, defaulting to `engine.logger`.\n\n \"\"\"\n self._saver = SaveImage(\n output_dir=output_dir,\n output_postfix=output_postfix,\n output_ext=output_ext,\n resample=resample,\n mode=mode,\n padding_mode=padding_mode,\n scale=scale,\n dtype=dtype,\n output_dtype=output_dtype,\n squeeze_end_dims=squeeze_end_dims,\n data_root_dir=data_root_dir,\n )\n self.resample = resample\n self.batch_transform = batch_transform\n self.output_transform = output_transform\n\n self.logger = logging.getLogger(name)\n self._name = name", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/segmentation_saver.py_SegmentationSaver.attach_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/segmentation_saver.py_SegmentationSaver.attach_", "embedding": null, "metadata": {"file_path": "monai/handlers/segmentation_saver.py", "file_name": "segmentation_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 131, "end_line": 164, "span_ids": ["SegmentationSaver.__call__", "SegmentationSaver.attach"], "tokens": 349}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SegmentationSaver:\n\n def attach(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n if self._name is None:\n self.logger = engine.logger\n if not engine.has_event_handler(self, Events.ITERATION_COMPLETED):\n engine.add_event_handler(Events.ITERATION_COMPLETED, self)\n\n def __call__(self, engine: Engine) -> None:\n \"\"\"\n This method assumes self.batch_transform will extract metadata from the input batch.\n Output file datatype is determined from ``engine.state.output.dtype``.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n meta_data = self.batch_transform(engine.state.batch)\n engine_output = self.output_transform(engine.state.output)\n if isinstance(engine_output, (tuple, list)):\n # if a list of data in shape: [channel, H, W, [D]], save every item separately\n if self.resample:\n warnings.warn(\"if saving inverted data, please set `resample=False` as it's already resampled.\")\n\n self._saver.save_batch = False\n for i, d in enumerate(engine_output):\n 
self._saver(d, {k: meta_data[k][i] for k in meta_data} if meta_data is not None else None)\n else:\n # if the data is in shape: [batch, channel, H, W, [D]]\n self._saver.save_batch = True\n self._saver(engine_output, meta_data)\n self.logger.info(\"saved all the model outputs into files.\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler_StatsHandler.__init__.if_logger_handler_is_not_.self_logger_addHandler_lo": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler_StatsHandler.__init__.if_logger_handler_is_not_.self_logger_addHandler_lo", "embedding": null, "metadata": {"file_path": "monai/handlers/stats_handler.py", "file_name": "stats_handler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 30, "end_line": 86, "span_ids": ["StatsHandler.__init__", "StatsHandler"], "tokens": 581}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class StatsHandler:\n \"\"\"\n StatsHandler defines a set of Ignite Event-handlers for all the log printing logics.\n It's can be used for any Ignite Engine(trainer, validator and evaluator).\n And it can support logging for epoch level and iteration level with pre-defined loggers.\n\n Default behaviors:\n - When EPOCH_COMPLETED, logs ``engine.state.metrics`` using ``self.logger``.\n - When ITERATION_COMPLETED, logs\n ``self.output_transform(engine.state.output)`` using ``self.logger``.\n\n \"\"\"\n\n def __init__(\n self,\n epoch_print_logger: Optional[Callable[[Engine], Any]] = None,\n iteration_print_logger: Optional[Callable[[Engine], Any]] = None,\n output_transform: Callable = lambda x: x,\n global_epoch_transform: Callable = lambda x: x,\n name: Optional[str] = None,\n tag_name: str = DEFAULT_TAG,\n key_var_format: str = DEFAULT_KEY_VAL_FORMAT,\n logger_handler: Optional[logging.Handler] = None,\n ) -> None:\n \"\"\"\n\n Args:\n epoch_print_logger: customized callable printer for epoch level logging.\n Must accept parameter \"engine\", use default printer if None.\n iteration_print_logger: customized callable printer for iteration level logging.\n Must accept parameter \"engine\", use default printer if None.\n output_transform: a callable that is used to transform the\n ``ignite.engine.output`` into a scalar to print, or a dictionary of {key: scalar}.\n In the latter case, the output string will be formatted as key: value.\n By default this value logging happens when every iteration completed.\n global_epoch_transform: a callable that is used to customize global epoch number.\n For example, in evaluation, the evaluator engine might want to print synced epoch number\n with the trainer engine.\n name: identifier of logging.logger to use, defaulting to ``engine.logger``.\n tag_name: when iteration output is a scalar, tag_name is used to print\n tag_name: scalar_value to logger. 
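Putting the `SegmentationSaver` constructor and `__call__` above together, a self-contained sketch might look like the following. The fake step function, the `filename_or_obj` meta key and the output folder are assumptions for illustration; real pipelines would pass the meta dictionaries produced by MONAI's loading transforms via `batch_transform`.

```python
import torch
from ignite.engine import Engine

from monai.handlers import SegmentationSaver

def _fake_step(engine, batch):
    # Pretend the model produced a batch of two 1-channel 8x8x8 predictions.
    return torch.zeros(2, 1, 8, 8, 8)

evaluator = Engine(_fake_step)

saver = SegmentationSaver(
    output_dir="./out_seg",
    output_postfix="seg",
    output_ext=".nii.gz",
    resample=False,                          # no original affine in this toy meta data
    batch_transform=lambda batch: batch,     # the batch below already is the meta dict
    output_transform=lambda output: output,  # the output already is the prediction tensor
)
saver.attach(evaluator)

# One iteration whose "batch" is just the per-item meta data.
meta = {"filename_or_obj": ["case_001.nii.gz", "case_002.nii.gz"]}
evaluator.run([meta], max_epochs=1)  # writes e.g. ./out_seg/case_001/case_001_seg.nii.gz
```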
Defaults to ``'Loss'``.\n key_var_format: a formatting string to control the output string format of key: value.\n logger_handler: add additional handler to handle the stats data: save to file, etc.\n Add existing python logging handlers: https://docs.python.org/3/library/logging.handlers.html\n \"\"\"\n\n self.epoch_print_logger = epoch_print_logger\n self.iteration_print_logger = iteration_print_logger\n self.output_transform = output_transform\n self.global_epoch_transform = global_epoch_transform\n self.logger = logging.getLogger(name)\n self._name = name\n\n self.tag_name = tag_name\n self.key_var_format = key_var_format\n if logger_handler is not None:\n self.logger.addHandler(logger_handler)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler.attach_StatsHandler.attach.None_3.engine_add_event_handler_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler.attach_StatsHandler.attach.None_3.engine_add_event_handler_", "embedding": null, "metadata": {"file_path": "monai/handlers/stats_handler.py", "file_name": "stats_handler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 88, "end_line": 103, "span_ids": ["StatsHandler.attach"], "tokens": 173}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class StatsHandler:\n\n def attach(self, engine: Engine) -> None:\n \"\"\"\n Register a set of Ignite Event-Handlers to a specified Ignite engine.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n\n \"\"\"\n if self._name is None:\n self.logger = engine.logger\n if not engine.has_event_handler(self.iteration_completed, Events.ITERATION_COMPLETED):\n engine.add_event_handler(Events.ITERATION_COMPLETED, self.iteration_completed)\n if not engine.has_event_handler(self.epoch_completed, Events.EPOCH_COMPLETED):\n engine.add_event_handler(Events.EPOCH_COMPLETED, self.epoch_completed)\n if not engine.has_event_handler(self.exception_raised, Events.EXCEPTION_RAISED):\n engine.add_event_handler(Events.EXCEPTION_RAISED, self.exception_raised)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler._default_epoch_print_StatsHandler._default_epoch_print.None_1": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler._default_epoch_print_StatsHandler._default_epoch_print.None_1", "embedding": null, "metadata": {"file_path": "monai/handlers/stats_handler.py", "file_name": "stats_handler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 147, "end_line": 171, "span_ids": 
["StatsHandler._default_epoch_print"], "tokens": 227}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class StatsHandler:\n\n def _default_epoch_print(self, engine: Engine) -> None:\n \"\"\"\n Execute epoch level log operation based on Ignite engine.state data.\n print the values from Ignite state.metrics dict.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n\n \"\"\"\n prints_dict = engine.state.metrics\n if not prints_dict:\n return\n current_epoch = self.global_epoch_transform(engine.state.epoch)\n\n out_str = f\"Epoch[{current_epoch}] Metrics -- \"\n for name in sorted(prints_dict):\n value = prints_dict[name]\n out_str += self.key_var_format.format(name, value)\n self.logger.info(out_str)\n\n if hasattr(engine.state, \"key_metric_name\"):\n if hasattr(engine.state, \"best_metric\") and hasattr(engine.state, \"best_metric_epoch\"):\n out_str = f\"Key metric: {engine.state.key_metric_name} \"\n out_str += f\"best value: {engine.state.best_metric} at epoch: {engine.state.best_metric_epoch}\"\n self.logger.info(out_str)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler._default_iteration_print_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler._default_iteration_print_", "embedding": null, "metadata": {"file_path": "monai/handlers/stats_handler.py", "file_name": "stats_handler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 173, "end_line": 224, "span_ids": ["StatsHandler._default_iteration_print"], "tokens": 452}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class StatsHandler:\n\n def _default_iteration_print(self, engine: Engine) -> None:\n \"\"\"\n Execute iteration log operation based on Ignite engine.state data.\n Print the values from Ignite state.logs dict.\n Default behavior is to print loss from output[1], skip if output[1] is not loss.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n\n \"\"\"\n loss = self.output_transform(engine.state.output)\n if loss is None:\n return # no printing if the output is empty\n\n out_str = \"\"\n if isinstance(loss, dict): # print dictionary items\n for name in sorted(loss):\n value = loss[name]\n if not is_scalar(value):\n warnings.warn(\n \"ignoring non-scalar output in StatsHandler,\"\n \" make sure `output_transform(engine.state.output)` returns\"\n \" a scalar or dictionary of key and scalar pairs to avoid this warning.\"\n \" {}:{}\".format(name, type(value))\n )\n continue # not printing multi dimensional output\n out_str += self.key_var_format.format(name, value.item() if 
isinstance(value, torch.Tensor) else value)\n else:\n if is_scalar(loss): # not printing multi dimensional output\n out_str += self.key_var_format.format(\n self.tag_name, loss.item() if isinstance(loss, torch.Tensor) else loss\n )\n else:\n warnings.warn(\n \"ignoring non-scalar output in StatsHandler,\"\n \" make sure `output_transform(engine.state.output)` returns\"\n \" a scalar or a dictionary of key and scalar pairs to avoid this warning.\"\n \" {}\".format(type(loss))\n )\n\n if not out_str:\n return # no value to print\n\n num_iterations = engine.state.epoch_length\n current_iteration = (engine.state.iteration - 1) % num_iterations + 1\n current_epoch = engine.state.epoch\n num_epochs = engine.state.max_epochs\n\n base_str = f\"Epoch: {current_epoch}/{num_epochs}, Iter: {current_iteration}/{num_iterations} --\"\n\n self.logger.info(\" \".join([base_str, out_str]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler_TensorBoardStatsHandler.__init__.self.tag_name.tag_name": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler_TensorBoardStatsHandler.__init__.self.tag_name.tag_name", "embedding": null, "metadata": {"file_path": "monai/handlers/tensorboard_handlers.py", "file_name": "tensorboard_handlers.py", "file_type": "text/x-python", "category": "implementation", "start_line": 63, "end_line": 116, "span_ids": ["TensorBoardStatsHandler.__init__", "TensorBoardStatsHandler"], "tokens": 637}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TensorBoardStatsHandler(TensorBoardHandler):\n \"\"\"\n TensorBoardStatsHandler defines a set of Ignite Event-handlers for all the TensorBoard logics.\n It's can be used for any Ignite Engine(trainer, validator and evaluator).\n And it can support both epoch level and iteration level with pre-defined TensorBoard event writer.\n The expected data source is Ignite ``engine.state.output`` and ``engine.state.metrics``.\n\n Default behaviors:\n - When EPOCH_COMPLETED, write each dictionary item in\n ``engine.state.metrics`` to TensorBoard.\n - When ITERATION_COMPLETED, write each dictionary item in\n ``self.output_transform(engine.state.output)`` to TensorBoard.\n \"\"\"\n\n def __init__(\n self,\n summary_writer: Optional[SummaryWriter] = None,\n log_dir: str = \"./runs\",\n epoch_event_writer: Optional[Callable[[Engine, SummaryWriter], Any]] = None,\n epoch_interval: int = 1,\n iteration_event_writer: Optional[Callable[[Engine, SummaryWriter], Any]] = None,\n iteration_interval: int = 1,\n output_transform: Callable = lambda x: x,\n global_epoch_transform: Callable = lambda x: x,\n tag_name: str = DEFAULT_TAG,\n ) -> None:\n \"\"\"\n Args:\n summary_writer: user can specify TensorBoard SummaryWriter,\n default to create a new writer.\n log_dir: if using default SummaryWriter, write logs to this directory, default is `./runs`.\n 
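For the `StatsHandler` chunks above, the default iteration printer only needs `output_transform(engine.state.output)` to be a scalar or a dict of scalars. A minimal sketch; the stand-in engine and fake loss are illustrative.

```python
import logging

from ignite.engine import Engine

from monai.handlers import StatsHandler

logging.basicConfig(level=logging.INFO)

# Stand-in trainer whose per-iteration output is a plain scalar "loss".
trainer = Engine(lambda engine, batch: 0.5 / engine.state.iteration)

stats = StatsHandler(
    name="trainer",                          # use this logger rather than engine.logger
    tag_name="train_loss",                   # label applied when the output is a bare scalar
    output_transform=lambda output: output,  # already a scalar, pass it through
)
stats.attach(trainer)

trainer.run(range(4), max_epochs=2)  # logs "Epoch: e/E, Iter: i/I -- train_loss: ..." lines
```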
epoch_event_writer: customized callable TensorBoard writer for epoch level.\n Must accept parameter \"engine\" and \"summary_writer\", use default event writer if None.\n epoch_interval: the epoch interval at which the epoch_event_writer is called. Defaults to 1.\n iteration_event_writer: customized callable TensorBoard writer for iteration level.\n Must accept parameter \"engine\" and \"summary_writer\", use default event writer if None.\n iteration_interval: the iteration interval at which the iteration_event_writer is called. Defaults to 1.\n output_transform: a callable that is used to transform the\n ``ignite.engine.output`` into a scalar to plot, or a dictionary of {key: scalar}.\n In the latter case, the output string will be formatted as key: value.\n By default this value plotting happens when every iteration completed.\n global_epoch_transform: a callable that is used to customize global epoch number.\n For example, in evaluation, the evaluator engine might want to use trainer engines epoch number\n when plotting epoch vs metric curves.\n tag_name: when iteration output is a scalar, tag_name is used to plot, defaults to ``'Loss'``.\n \"\"\"\n super().__init__(summary_writer=summary_writer, log_dir=log_dir)\n self.epoch_event_writer = epoch_event_writer\n self.epoch_interval = epoch_interval\n self.iteration_event_writer = iteration_event_writer\n self.iteration_interval = iteration_interval\n self.output_transform = output_transform\n self.global_epoch_transform = global_epoch_transform\n self.tag_name = tag_name", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler.attach_TensorBoardStatsHandler.attach.None_1.engine_add_event_handler_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler.attach_TensorBoardStatsHandler.attach.None_1.engine_add_event_handler_", "embedding": null, "metadata": {"file_path": "monai/handlers/tensorboard_handlers.py", "file_name": "tensorboard_handlers.py", "file_type": "text/x-python", "category": "implementation", "start_line": 118, "end_line": 131, "span_ids": ["TensorBoardStatsHandler.attach"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TensorBoardStatsHandler(TensorBoardHandler):\n\n def attach(self, engine: Engine) -> None:\n \"\"\"\n Register a set of Ignite Event-Handlers to a specified Ignite engine.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n\n \"\"\"\n if not engine.has_event_handler(self.iteration_completed, Events.ITERATION_COMPLETED):\n engine.add_event_handler(\n Events.ITERATION_COMPLETED(every=self.iteration_interval), self.iteration_completed\n )\n if not engine.has_event_handler(self.epoch_completed, Events.EPOCH_COMPLETED):\n engine.add_event_handler(Events.EPOCH_COMPLETED(every=self.epoch_interval), self.epoch_completed)", "start_char_idx": null, "end_char_idx": 
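`TensorBoardStatsHandler` follows the same pattern as `StatsHandler` but writes scalars to a TensorBoard `SummaryWriter` instead of a logger. A small sketch, assuming the `tensorboard` package is installed; the stand-in engine and intervals are illustrative.

```python
from ignite.engine import Engine

from monai.handlers import TensorBoardStatsHandler

# Stand-in trainer emitting a scalar loss every iteration.
trainer = Engine(lambda engine, batch: 0.1 * engine.state.iteration)

tb_stats = TensorBoardStatsHandler(
    log_dir="./runs",                        # a new SummaryWriter is created here by default
    tag_name="train_loss",
    iteration_interval=10,                   # write the scalar every 10 iterations
    output_transform=lambda output: output,
)
tb_stats.attach(trainer)

trainer.run(range(100), max_epochs=2)  # scalars appear under the "train_loss" tag
```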
null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler.epoch_completed_TensorBoardStatsHandler.iteration_completed.if_self_iteration_event_w.else_.self__default_iteration_w": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler.epoch_completed_TensorBoardStatsHandler.iteration_completed.if_self_iteration_event_w.else_.self__default_iteration_w", "embedding": null, "metadata": {"file_path": "monai/handlers/tensorboard_handlers.py", "file_name": "tensorboard_handlers.py", "file_type": "text/x-python", "category": "implementation", "start_line": 133, "end_line": 159, "span_ids": ["TensorBoardStatsHandler.iteration_completed", "TensorBoardStatsHandler.epoch_completed"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TensorBoardStatsHandler(TensorBoardHandler):\n\n def epoch_completed(self, engine: Engine) -> None:\n \"\"\"\n Handler for train or validation/evaluation epoch completed Event.\n Write epoch level events, default values are from Ignite state.metrics dict.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n\n \"\"\"\n if self.epoch_event_writer is not None:\n self.epoch_event_writer(engine, self._writer)\n else:\n self._default_epoch_writer(engine, self._writer)\n\n def iteration_completed(self, engine: Engine) -> None:\n \"\"\"\n Handler for train or validation/evaluation iteration completed Event.\n Write iteration level events, default values are from Ignite state.logs dict.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n\n \"\"\"\n if self.iteration_event_writer is not None:\n self.iteration_event_writer(engine, self._writer)\n else:\n self._default_iteration_writer(engine, self._writer)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler._default_epoch_writer_TensorBoardStatsHandler._default_epoch_writer.writer_flush_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler._default_epoch_writer_TensorBoardStatsHandler._default_epoch_writer.writer_flush_", "embedding": null, "metadata": {"file_path": "monai/handlers/tensorboard_handlers.py", "file_name": "tensorboard_handlers.py", "file_type": "text/x-python", "category": "implementation", "start_line": 161, "end_line": 175, "span_ids": ["TensorBoardStatsHandler._default_epoch_writer"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", 
"tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TensorBoardStatsHandler(TensorBoardHandler):\n\n def _default_epoch_writer(self, engine: Engine, writer: SummaryWriter) -> None:\n \"\"\"\n Execute epoch level event write operation based on Ignite engine.state data.\n Default is to write the values from Ignite state.metrics dict.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n writer: TensorBoard writer, created in TensorBoardHandler.\n\n \"\"\"\n current_epoch = self.global_epoch_transform(engine.state.epoch)\n summary_dict = engine.state.metrics\n for name, value in summary_dict.items():\n writer.add_scalar(name, value, current_epoch)\n writer.flush()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler._default_iteration_writer_TensorBoardStatsHandler._default_iteration_writer.writer_flush_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardStatsHandler._default_iteration_writer_TensorBoardStatsHandler._default_iteration_writer.writer_flush_", "embedding": null, "metadata": {"file_path": "monai/handlers/tensorboard_handlers.py", "file_name": "tensorboard_handlers.py", "file_type": "text/x-python", "category": "implementation", "start_line": 177, "end_line": 215, "span_ids": ["TensorBoardStatsHandler._default_iteration_writer"], "tokens": 353}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TensorBoardStatsHandler(TensorBoardHandler):\n\n def _default_iteration_writer(self, engine: Engine, writer: SummaryWriter) -> None:\n \"\"\"\n Execute iteration level event write operation based on Ignite engine.state data.\n Default is to write the loss value of current iteration.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n writer: TensorBoard writer, created in TensorBoardHandler.\n\n \"\"\"\n loss = self.output_transform(engine.state.output)\n if loss is None:\n return # do nothing if output is empty\n if isinstance(loss, dict):\n for name in sorted(loss):\n value = loss[name]\n if not is_scalar(value):\n warnings.warn(\n \"ignoring non-scalar output in TensorBoardStatsHandler,\"\n \" make sure `output_transform(engine.state.output)` returns\"\n \" a scalar or dictionary of key and scalar pairs to avoid this warning.\"\n \" {}:{}\".format(name, type(value))\n )\n continue # not plot multi dimensional output\n writer.add_scalar(\n name, value.item() if isinstance(value, torch.Tensor) else value, engine.state.iteration\n )\n elif is_scalar(loss): # not printing multi dimensional output\n writer.add_scalar(\n self.tag_name, loss.item() if isinstance(loss, torch.Tensor) else loss, engine.state.iteration\n )\n else:\n warnings.warn(\n \"ignoring non-scalar output in TensorBoardStatsHandler,\"\n \" make 
sure `output_transform(engine.state.output)` returns\"\n \" a scalar or a dictionary of key and scalar pairs to avoid this warning.\"\n \" {}\".format(type(loss))\n )\n writer.flush()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardImageHandler_TensorBoardImageHandler.__init__.self.max_channels.max_channels": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardImageHandler_TensorBoardImageHandler.__init__.self.max_channels.max_channels", "embedding": null, "metadata": {"file_path": "monai/handlers/tensorboard_handlers.py", "file_name": "tensorboard_handlers.py", "file_type": "text/x-python", "category": "implementation", "start_line": 218, "end_line": 279, "span_ids": ["TensorBoardImageHandler.__init__", "TensorBoardImageHandler"], "tokens": 731}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TensorBoardImageHandler(TensorBoardHandler):\n \"\"\"\n TensorBoardImageHandler is an Ignite Event handler that can visualize images, labels and outputs as 2D/3D images.\n 2D output (shape in Batch, channel, H, W) will be shown as simple image using the first element in the batch,\n for 3D to ND output (shape in Batch, channel, H, W, D) input, each of ``self.max_channels`` number of images'\n last three dimensions will be shown as animated GIF along the last axis (typically Depth).\n\n It can be used for any Ignite Engine (trainer, validator and evaluator).\n User can easily add it to engine for any expected Event, for example: ``EPOCH_COMPLETED``,\n ``ITERATION_COMPLETED``. The expected data source is ignite's ``engine.state.batch`` and ``engine.state.output``.\n\n Default behavior:\n - Show y_pred as images (GIF for 3D) on TensorBoard when Event triggered,\n - Need to use ``batch_transform`` and ``output_transform`` to specify\n how many images to show and show which channel.\n - Expects ``batch_transform(engine.state.batch)`` to return data\n format: (image[N, channel, ...], label[N, channel, ...]).\n - Expects ``output_transform(engine.state.output)`` to return a torch\n tensor in format (y_pred[N, channel, ...], loss).\n\n \"\"\"\n\n def __init__(\n self,\n summary_writer: Optional[SummaryWriter] = None,\n log_dir: str = \"./runs\",\n interval: int = 1,\n epoch_level: bool = True,\n batch_transform: Callable = lambda x: x,\n output_transform: Callable = lambda x: x,\n global_iter_transform: Callable = lambda x: x,\n index: int = 0,\n max_channels: int = 1,\n max_frames: int = 64,\n ) -> None:\n \"\"\"\n Args:\n summary_writer: user can specify TensorBoard SummaryWriter,\n default to create a new writer.\n log_dir: if using default SummaryWriter, write logs to this directory, default is `./runs`.\n interval: plot content from engine.state every N epochs or every N iterations, default is 1.\n epoch_level: plot content from engine.state every N epochs or N iterations. 
`True` is epoch level,\n `False` is iteration level.\n batch_transform: a callable that is used to transform the\n ``ignite.engine.batch`` into expected format to extract several label data.\n output_transform: a callable that is used to transform the\n ``ignite.engine.output`` into expected format to extract several output data.\n global_iter_transform: a callable that is used to customize global step number for TensorBoard.\n For example, in evaluation, the evaluator engine needs to know current epoch from trainer.\n index: plot which element in a data batch, default is the first element.\n max_channels: number of channels to plot.\n max_frames: number of frames for 2D-t plot.\n \"\"\"\n super().__init__(summary_writer=summary_writer, log_dir=log_dir)\n self.interval = interval\n self.epoch_level = epoch_level\n self.batch_transform = batch_transform\n self.output_transform = output_transform\n self.global_iter_transform = global_iter_transform\n self.index = index\n self.max_frames = max_frames\n self.max_channels = max_channels", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardImageHandler.attach_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_TensorBoardImageHandler.attach_", "embedding": null, "metadata": {"file_path": "monai/handlers/tensorboard_handlers.py", "file_name": "tensorboard_handlers.py", "file_type": "text/x-python", "category": "implementation", "start_line": 281, "end_line": 346, "span_ids": ["TensorBoardImageHandler.__call__", "TensorBoardImageHandler.attach"], "tokens": 650}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TensorBoardImageHandler(TensorBoardHandler):\n\n def attach(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n if self.epoch_level:\n engine.add_event_handler(Events.EPOCH_COMPLETED(every=self.interval), self)\n else:\n engine.add_event_handler(Events.ITERATION_COMPLETED(every=self.interval), self)\n\n def __call__(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n\n Raises:\n TypeError: When ``output_transform(engine.state.output)[0]`` type is not in\n ``Optional[Union[numpy.ndarray, torch.Tensor]]``.\n TypeError: When ``batch_transform(engine.state.batch)[1]`` type is not in\n ``Optional[Union[numpy.ndarray, torch.Tensor]]``.\n TypeError: When ``output_transform(engine.state.output)`` type is not in\n ``Optional[Union[numpy.ndarray, torch.Tensor]]``.\n\n \"\"\"\n step = self.global_iter_transform(engine.state.epoch if self.epoch_level else engine.state.iteration)\n show_images = self.batch_transform(engine.state.batch)[0]\n if isinstance(show_images, torch.Tensor):\n show_images = show_images.detach().cpu().numpy()\n if show_images is not None:\n if not isinstance(show_images, np.ndarray):\n raise TypeError(\n 
\"output_transform(engine.state.output)[0] must be None or one of \"\n f\"(numpy.ndarray, torch.Tensor) but is {type(show_images).__name__}.\"\n )\n plot_2d_or_3d_image(\n show_images, step, self._writer, self.index, self.max_channels, self.max_frames, \"input_0\"\n )\n\n show_labels = self.batch_transform(engine.state.batch)[1]\n if isinstance(show_labels, torch.Tensor):\n show_labels = show_labels.detach().cpu().numpy()\n if show_labels is not None:\n if not isinstance(show_labels, np.ndarray):\n raise TypeError(\n \"batch_transform(engine.state.batch)[1] must be None or one of \"\n f\"(numpy.ndarray, torch.Tensor) but is {type(show_labels).__name__}.\"\n )\n plot_2d_or_3d_image(\n show_labels, step, self._writer, self.index, self.max_channels, self.max_frames, \"input_1\"\n )\n\n show_outputs = self.output_transform(engine.state.output)\n if isinstance(show_outputs, torch.Tensor):\n show_outputs = show_outputs.detach().cpu().numpy()\n if show_outputs is not None:\n if not isinstance(show_outputs, np.ndarray):\n raise TypeError(\n \"output_transform(engine.state.output) must be None or one of \"\n f\"(numpy.ndarray, torch.Tensor) but is {type(show_outputs).__name__}.\"\n )\n plot_2d_or_3d_image(\n show_outputs, step, self._writer, self.index, self.max_channels, self.max_frames, \"output\"\n )\n\n self._writer.flush()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/validation_handler.py_from_typing_import_TYPE_C_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/validation_handler.py_from_typing_import_TYPE_C_", "embedding": null, "metadata": {"file_path": "monai/handlers/validation_handler.py", "file_name": "validation_handler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 76, "span_ids": ["ValidationHandler.__init__", "ValidationHandler.set_validator", "ValidationHandler.__call__", "docstring", "ValidationHandler.attach", "ValidationHandler"], "tokens": 546}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import TYPE_CHECKING, Optional\n\nfrom monai.engines.evaluator import Evaluator\nfrom monai.utils import exact_version, optional_import\n\nEvents, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Events\")\nif TYPE_CHECKING:\n from ignite.engine import Engine\nelse:\n Engine, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Engine\")\n\n\nclass ValidationHandler:\n \"\"\"\n Attach validator to the trainer engine in Ignite.\n It can support to execute validation every N epochs or every N iterations.\n\n \"\"\"\n\n def __init__(self, interval: int, validator: Optional[Evaluator] = None, epoch_level: bool = True) -> None:\n \"\"\"\n Args:\n interval: do validation every N epochs or every N iterations during training.\n validator: run the validator when trigger validation, suppose to be Evaluator.\n if None, should call `set_validator()` before training.\n epoch_level: execute 
validation every N epochs or N iterations.\n `True` is epoch level, `False` is iteration level.\n\n Raises:\n TypeError: When ``validator`` is not a ``monai.engines.evaluator.Evaluator``.\n\n \"\"\"\n if validator is not None and not isinstance(validator, Evaluator):\n raise TypeError(f\"validator must be a monai.engines.evaluator.Evaluator but is {type(validator).__name__}.\")\n self.validator = validator\n self.interval = interval\n self.epoch_level = epoch_level\n\n def set_validator(self, validator: Evaluator):\n \"\"\"\n Set validator if not setting in the __init__().\n \"\"\"\n if not isinstance(validator, Evaluator):\n raise TypeError(f\"validator must be a monai.engines.evaluator.Evaluator but is {type(validator).__name__}.\")\n self.validator = validator\n\n def attach(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n if self.epoch_level:\n engine.add_event_handler(Events.EPOCH_COMPLETED(every=self.interval), self)\n else:\n engine.add_event_handler(Events.ITERATION_COMPLETED(every=self.interval), self)\n\n def __call__(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n if self.validator is None:\n raise RuntimeError(\"please set validator in __init__() or call `set_validator()` before training.\")\n self.validator.run(engine.state.epoch)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/utils.py__get_scan_interval_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/inferers/utils.py__get_scan_interval_", "embedding": null, "metadata": {"file_path": "monai/inferers/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 144, "end_line": 166, "span_ids": ["_get_scan_interval"], "tokens": 212}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _get_scan_interval(\n image_size: Sequence[int], roi_size: Sequence[int], num_spatial_dims: int, overlap: float\n) -> Tuple[int, ...]:\n \"\"\"\n Compute scan interval according to the image size, roi size and overlap.\n Scan interval will be `int((1 - overlap) * roi_size)`, if interval is 0,\n use 1 instead to make sure sliding window works.\n\n \"\"\"\n if len(image_size) != num_spatial_dims:\n raise ValueError(\"image coord different from spatial dims.\")\n if len(roi_size) != num_spatial_dims:\n raise ValueError(\"roi coord different from spatial dims.\")\n\n scan_interval = []\n for i in range(num_spatial_dims):\n if roi_size[i] == image_size[i]:\n scan_interval.append(int(roi_size[i]))\n else:\n interval = int(roi_size[i] * (1 - overlap))\n scan_interval.append(interval if interval > 0 else 1)\n return tuple(scan_interval)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": 
"1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_DiceLoss.forward_DiceLoss.forward.return.f": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_DiceLoss.forward_DiceLoss.forward.return.f", "embedding": null, "metadata": {"file_path": "monai/losses/dice.py", "file_name": "dice.py", "file_type": "text/x-python", "category": "implementation", "start_line": 102, "end_line": 175, "span_ids": ["DiceLoss.forward"], "tokens": 646}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DiceLoss(_Loss):\n\n def forward(self, input: torch.Tensor, target: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n input: the shape should be BNH[WD], where N is the number of classes.\n target: the shape should be BNH[WD] or B1H[WD], where N is the number of classes.\n\n Raises:\n AssertionError: When input and target (after one hot transform if setted)\n have different shapes.\n ValueError: When ``self.reduction`` is not one of [\"mean\", \"sum\", \"none\"].\n\n \"\"\"\n if self.sigmoid:\n input = torch.sigmoid(input)\n\n n_pred_ch = input.shape[1]\n if self.softmax:\n if n_pred_ch == 1:\n warnings.warn(\"single channel prediction, `softmax=True` ignored.\")\n else:\n input = torch.softmax(input, 1)\n\n if self.other_act is not None:\n input = self.other_act(input)\n\n if self.to_onehot_y:\n if n_pred_ch == 1:\n warnings.warn(\"single channel prediction, `to_onehot_y=True` ignored.\")\n else:\n target = one_hot(target, num_classes=n_pred_ch)\n\n if not self.include_background:\n if n_pred_ch == 1:\n warnings.warn(\"single channel prediction, `include_background=False` ignored.\")\n else:\n # if skipping background, removing first channel\n target = target[:, 1:]\n input = input[:, 1:]\n\n if target.shape != input.shape:\n raise AssertionError(f\"ground truth has different shape ({target.shape}) from input ({input.shape})\")\n\n # reducing only spatial dimensions (not batch nor channels)\n reduce_axis: List[int] = torch.arange(2, len(input.shape)).tolist()\n if self.batch:\n # reducing spatial dimensions and batch\n reduce_axis = [0] + reduce_axis\n\n intersection = torch.sum(target * input, dim=reduce_axis)\n\n if self.squared_pred:\n target = torch.pow(target, 2)\n input = torch.pow(input, 2)\n\n ground_o = torch.sum(target, dim=reduce_axis)\n pred_o = torch.sum(input, dim=reduce_axis)\n\n denominator = ground_o + pred_o\n\n if self.jaccard:\n denominator = 2.0 * (denominator - intersection)\n\n f: torch.Tensor = 1.0 - (2.0 * intersection + self.smooth_nr) / (denominator + self.smooth_dr)\n\n if self.reduction == LossReduction.MEAN.value:\n f = torch.mean(f) # the batch and channel average\n elif self.reduction == LossReduction.SUM.value:\n f = torch.sum(f) # sum over the batch and channel dims\n elif self.reduction == LossReduction.NONE.value:\n pass # returns [N, n_classes] losses\n else:\n raise ValueError(f'Unsupported reduction: {self.reduction}, available options are [\"mean\", \"sum\", \"none\"].')\n\n return f", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", 
"class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/tversky.py_TverskyLoss.forward_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/tversky.py_TverskyLoss.forward_", "embedding": null, "metadata": {"file_path": "monai/losses/tversky.py", "file_name": "tversky.py", "file_type": "text/x-python", "category": "implementation", "start_line": 96, "end_line": 162, "span_ids": ["TverskyLoss.forward"], "tokens": 578}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TverskyLoss(_Loss):\n\n def forward(self, input: torch.Tensor, target: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n input: the shape should be BNH[WD].\n target: the shape should be BNH[WD].\n\n Raises:\n ValueError: When ``self.reduction`` is not one of [\"mean\", \"sum\", \"none\"].\n\n \"\"\"\n if self.sigmoid:\n input = torch.sigmoid(input)\n\n n_pred_ch = input.shape[1]\n if self.softmax:\n if n_pred_ch == 1:\n warnings.warn(\"single channel prediction, `softmax=True` ignored.\")\n else:\n input = torch.softmax(input, 1)\n\n if self.other_act is not None:\n input = self.other_act(input)\n\n if self.to_onehot_y:\n if n_pred_ch == 1:\n warnings.warn(\"single channel prediction, `to_onehot_y=True` ignored.\")\n else:\n target = one_hot(target, num_classes=n_pred_ch)\n\n if not self.include_background:\n if n_pred_ch == 1:\n warnings.warn(\"single channel prediction, `include_background=False` ignored.\")\n else:\n # if skipping background, removing first channel\n target = target[:, 1:]\n input = input[:, 1:]\n\n if target.shape != input.shape:\n raise AssertionError(f\"ground truth has differing shape ({target.shape}) from input ({input.shape})\")\n\n p0 = input\n p1 = 1 - p0\n g0 = target\n g1 = 1 - g0\n\n # reducing only spatial dimensions (not batch nor channels)\n reduce_axis: List[int] = torch.arange(2, len(input.shape)).tolist()\n if self.batch:\n # reducing spatial dimensions and batch\n reduce_axis = [0] + reduce_axis\n\n tp = torch.sum(p0 * g0, reduce_axis)\n fp = self.alpha * torch.sum(p0 * g1, reduce_axis)\n fn = self.beta * torch.sum(p1 * g0, reduce_axis)\n numerator = tp + self.smooth_nr\n denominator = tp + fp + fn + self.smooth_dr\n\n score: torch.Tensor = 1.0 - numerator / denominator\n\n if self.reduction == LossReduction.SUM.value:\n return torch.sum(score) # sum over the batch and channel dims\n if self.reduction == LossReduction.NONE.value:\n return score # returns [N, n_classes] losses\n if self.reduction == LossReduction.MEAN.value:\n return torch.mean(score)\n raise ValueError(f'Unsupported reduction: {self.reduction}, available options are [\"mean\", \"sum\", \"none\"].')", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/downsample.py_from_typing_import_Option_": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/downsample.py_from_typing_import_Option_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/downsample.py", "file_name": "downsample.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 63, "span_ids": ["MaxAvgPool", "MaxAvgPool.__init__", "MaxAvgPool.forward", "docstring"], "tokens": 418}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Optional, Sequence, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom monai.networks.layers.factories import Pool\nfrom monai.utils import ensure_tuple_rep\n\n\nclass MaxAvgPool(nn.Module):\n \"\"\"\n Downsample with both maxpooling and avgpooling,\n double the channel size by concatenating the downsampled feature maps.\n \"\"\"\n\n def __init__(\n self,\n spatial_dims: int,\n kernel_size: Union[Sequence[int], int],\n stride: Optional[Union[Sequence[int], int]] = None,\n padding: Union[Sequence[int], int] = 0,\n ceil_mode: bool = False,\n ) -> None:\n \"\"\"\n Args:\n spatial_dims: number of spatial dimensions of the input image.\n kernel_size: the kernel size of both pooling operations.\n stride: the stride of the window. Default value is `kernel_size`.\n padding: implicit zero padding to be added to both pooling operations.\n ceil_mode: when True, will use ceil instead of floor to compute the output shape.\n \"\"\"\n super().__init__()\n _params = {\n \"kernel_size\": ensure_tuple_rep(kernel_size, spatial_dims),\n \"stride\": None if stride is None else ensure_tuple_rep(stride, spatial_dims),\n \"padding\": ensure_tuple_rep(padding, spatial_dims),\n \"ceil_mode\": ceil_mode,\n }\n self.max_pool = Pool[Pool.MAX, spatial_dims](**_params)\n self.avg_pool = Pool[Pool.AVG, spatial_dims](**_params)\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n x: Tensor in shape (batch, channel, spatial_1[, spatial_2, ...]).\n\n Returns:\n Tensor in shape (batch, 2*channel, spatial_1[, spatial_2, ...]).\n \"\"\"\n x_d = torch.cat([self.max_pool(x), self.avg_pool(x)], dim=1)\n return x_d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/factories.py_from_typing_import_Any_C_LayerFactory.add_factory_callable.self.__doc__._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/factories.py_from_typing_import_Any_C_LayerFactory.add_factory_callable.self.__doc__._", "embedding": null, "metadata": {"file_path": "monai/networks/layers/factories.py", "file_name": "factories.py", "file_type": "text/x-python", "category": "implementation", "start_line": 63, "end_line": 98, "span_ids": ["LayerFactory", "LayerFactory.add_factory_callable", "docstring:11", "LayerFactory.__init__", "LayerFactory.names"], "tokens": 280}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Any, Callable, Dict, Optional, Tuple, Type, Union\n\nimport torch.nn as nn\n\n__all__ = [\"LayerFactory\", \"Dropout\", \"Norm\", \"Act\", \"Conv\", \"Pool\", \"Pad\", \"split_args\"]\n\n\nclass LayerFactory:\n \"\"\"\n Factory object for creating layers, this uses given factory functions to actually produce the types or constructing\n callables. These functions are referred to by name and can be added at any time.\n \"\"\"\n\n def __init__(self) -> None:\n self.factories: Dict[str, Callable] = {}\n\n @property\n def names(self) -> Tuple[str, ...]:\n \"\"\"\n Produces all factory names.\n \"\"\"\n\n return tuple(self.factories)\n\n def add_factory_callable(self, name: str, func: Callable) -> None:\n \"\"\"\n Add the factory function to this object under the given name.\n \"\"\"\n\n self.factories[name.upper()] = func\n self.__doc__ = (\n \"The supported member\"\n + (\"s are: \" if len(self.names) > 1 else \" is: \")\n + \", \".join(f\"``{name}``\" for name in self.names)\n + \".\\nPlease see :py:class:`monai.networks.layers.split_args` for additional args parsing.\"\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/factories.py_LayerFactory.factory_function_LayerFactory.get_constructor.return.fact_args_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/factories.py_LayerFactory.factory_function_LayerFactory.get_constructor.return.fact_args_", "embedding": null, "metadata": {"file_path": "monai/networks/layers/factories.py", "file_name": "factories.py", "file_type": "text/x-python", "category": "implementation", "start_line": 100, "end_line": 124, "span_ids": ["LayerFactory.get_constructor", "LayerFactory.factory_function"], "tokens": 161}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LayerFactory:\n\n def factory_function(self, name: str) -> Callable:\n \"\"\"\n Decorator for adding a factory function with the given name.\n \"\"\"\n\n def _add(func: Callable) -> Callable:\n self.add_factory_callable(name, func)\n return func\n\n return _add\n\n def get_constructor(self, factory_name: str, *args) -> Any:\n \"\"\"\n Get the constructor for the given factory name and arguments.\n\n Raises:\n TypeError: When ``factory_name`` is not a ``str``.\n\n \"\"\"\n\n if not isinstance(factory_name, str):\n raise TypeError(f\"factory_name must a str but is {type(factory_name).__name__}.\")\n\n fact = self.factories[factory_name.upper()]\n return fact(*args)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/factories.py_LayerFactory.__getitem___LayerFactory.__getattr__.return.super___getattribute___": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/factories.py_LayerFactory.__getitem___LayerFactory.__getattr__.return.super___getattribute___", "embedding": null, "metadata": {"file_path": "monai/networks/layers/factories.py", "file_name": "factories.py", "file_type": "text/x-python", "category": "implementation", "start_line": 126, "end_line": 153, "span_ids": ["LayerFactory.__getitem__", "LayerFactory.__getattr__"], "tokens": 228}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LayerFactory:\n\n def __getitem__(self, args) -> Any:\n \"\"\"\n Get the given name or name/arguments pair. If `args` is a callable it is assumed to be the constructor\n itself and is returned, otherwise it should be the factory name or a pair containing the name and arguments.\n \"\"\"\n\n # `args[0]` is actually a type or constructor\n if callable(args):\n return args\n\n # `args` is a factory name or a name with arguments\n if isinstance(args, str):\n name_obj, args = args, ()\n else:\n name_obj, *args = args\n\n return self.get_constructor(name_obj, *args)\n\n def __getattr__(self, key):\n \"\"\"\n If `key` is a factory name, return it, otherwise behave as inherited. This allows referring to factory names\n as if they were constants, eg. 
`Fact.FOO` for a factory Fact with factory function foo.\n \"\"\"\n\n if key in self.factories:\n return key\n\n return super().__getattribute__(key)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/classifier.py_from_typing_import_Option_Classifier.__init__.if_last_act_is_not_None_.self_final_add_module_la": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/classifier.py_from_typing_import_Option_Classifier.__init__.if_last_act_is_not_None_.self_final_add_module_la", "embedding": null, "metadata": {"file_path": "monai/networks/nets/classifier.py", "file_name": "classifier.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 64, "span_ids": ["Classifier.__init__", "Classifier", "docstring"], "tokens": 485}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Optional, Sequence, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom monai.networks.layers.factories import Act, Norm, split_args\nfrom monai.networks.nets.regressor import Regressor\n\n__all__ = [\"Classifier\", \"Discriminator\", \"Critic\"]\n\n\nclass Classifier(Regressor):\n \"\"\"\n Defines a classification network from Regressor by specifying the output shape as a single dimensional tensor\n with size equal to the number of classes to predict. 
The final activation function can also be specified, eg.\n softmax or sigmoid.\n \"\"\"\n\n def __init__(\n self,\n in_shape: Sequence[int],\n classes: int,\n channels: Sequence[int],\n strides: Sequence[int],\n kernel_size: Union[Sequence[int], int] = 3,\n num_res_units: int = 2,\n act=Act.PRELU,\n norm=Norm.INSTANCE,\n dropout: Optional[float] = None,\n bias: bool = True,\n last_act: Optional[str] = None,\n ) -> None:\n \"\"\"\n Args:\n in_shape: tuple of integers stating the dimension of the input tensor (minus batch dimension)\n classes: integer stating the dimension of the final output tensor\n channels: tuple of integers stating the output channels of each convolutional layer\n strides: tuple of integers stating the stride (downscale factor) of each convolutional layer\n kernel_size: integer or tuple of integers stating size of convolutional kernels\n num_res_units: integer stating number of convolutions in residual units, 0 means no residual units\n act: name or type defining activation layers\n norm: name or type defining normalization layers\n dropout: optional float value in range [0, 1] stating dropout probability for layers, None for no dropout\n bias: boolean stating if convolution layers should have a bias component\n last_act: name defining the last activation layer\n \"\"\"\n super().__init__(in_shape, (classes,), channels, strides, kernel_size, num_res_units, act, norm, dropout, bias)\n\n if last_act is not None:\n last_act_name, last_act_args = split_args(last_act)\n last_act_type = Act[last_act_name]\n\n self.final.add_module(\"lastact\", last_act_type(**last_act_args))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/classifier.py_Discriminator_Discriminator.__init__.super___init___in_shape": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/classifier.py_Discriminator_Discriminator.__init__.super___init___in_shape", "embedding": null, "metadata": {"file_path": "monai/networks/nets/classifier.py", "file_name": "classifier.py", "file_type": "text/x-python", "category": "implementation", "start_line": 65, "end_line": 97, "span_ids": ["Discriminator.__init__", "Discriminator"], "tokens": 350}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Discriminator(Classifier):\n \"\"\"\n Defines a discriminator network from Classifier with a single output value and sigmoid activation by default. 
This\n is meant for use with GANs or other applications requiring a generic discriminator network.\n \"\"\"\n\n def __init__(\n self,\n in_shape: Sequence[int],\n channels: Sequence[int],\n strides: Sequence[int],\n kernel_size: Union[Sequence[int], int] = 3,\n num_res_units: int = 2,\n act=Act.PRELU,\n norm=Norm.INSTANCE,\n dropout: Optional[float] = 0.25,\n bias: bool = True,\n last_act=Act.SIGMOID,\n ) -> None:\n \"\"\"\n Args:\n in_shape: tuple of integers stating the dimension of the input tensor (minus batch dimension)\n channels: tuple of integers stating the output channels of each convolutional layer\n strides: tuple of integers stating the stride (downscale factor) of each convolutional layer\n kernel_size: integer or tuple of integers stating size of convolutional kernels\n num_res_units: integer stating number of convolutions in residual units, 0 means no residual units\n act: name or type defining activation layers\n norm: name or type defining normalization layers\n dropout: optional float value in range [0, 1] stating dropout probability for layers, None for no dropout\n bias: boolean stating if convolution layers should have a bias component\n last_act: name defining the last activation layer\n \"\"\"\n super().__init__(in_shape, 1, channels, strides, kernel_size, num_res_units, act, norm, dropout, bias, last_act)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/classifier.py_Critic_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/classifier.py_Critic_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/classifier.py", "file_name": "classifier.py", "file_type": "text/x-python", "category": "implementation", "start_line": 100, "end_line": 141, "span_ids": ["Critic", "Critic.forward", "Critic.__init__", "Critic._get_final_layer"], "tokens": 422}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Critic(Classifier):\n \"\"\"\n Defines a critic network from Classifier with a single output value and no final activation. The final layer is\n `nn.Flatten` instead of `nn.Linear`, the final result is computed as the mean over the first dimension. 
This is\n meant to be used with Wasserstein GANs.\n \"\"\"\n\n def __init__(\n self,\n in_shape: Sequence[int],\n channels: Sequence[int],\n strides: Sequence[int],\n kernel_size: Union[Sequence[int], int] = 3,\n num_res_units: int = 2,\n act=Act.PRELU,\n norm=Norm.INSTANCE,\n dropout: Optional[float] = 0.25,\n bias: bool = True,\n ) -> None:\n \"\"\"\n Args:\n in_shape: tuple of integers stating the dimension of the input tensor (minus batch dimension)\n channels: tuple of integers stating the output channels of each convolutional layer\n strides: tuple of integers stating the stride (downscale factor) of each convolutional layer\n kernel_size: integer or tuple of integers stating size of convolutional kernels\n num_res_units: integer stating number of convolutions in residual units, 0 means no residual units\n act: name or type defining activation layers\n norm: name or type defining normalization layers\n dropout: optional float value in range [0, 1] stating dropout probability for layers, None for no dropout\n bias: boolean stating if convolution layers should have a bias component\n \"\"\"\n super().__init__(in_shape, 1, channels, strides, kernel_size, num_res_units, act, norm, dropout, bias, None)\n\n def _get_final_layer(self, in_shape: Sequence[int]):\n return nn.Flatten()\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n x = self.net(x)\n x = self.final(x)\n x = x.mean(1)\n return x.view((x.shape[0], -1))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py__DenseBlock__DenseBlock.__init__.for_i_in_range_layers_.self_add_module_denselay": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py__DenseBlock__DenseBlock.__init__.for_i_in_range_layers_.self_add_module_denselay", "embedding": null, "metadata": {"file_path": "monai/networks/nets/densenet.py", "file_name": "densenet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 57, "end_line": 75, "span_ids": ["_DenseBlock.__init__", "_DenseBlock"], "tokens": 213}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class _DenseBlock(nn.Sequential):\n def __init__(\n self, spatial_dims: int, layers: int, in_channels: int, bn_size: int, growth_rate: int, dropout_prob: float\n ) -> None:\n \"\"\"\n Args:\n spatial_dims: number of spatial dimensions of the input image.\n layers: number of layers in the block.\n in_channels: number of the input channel.\n bn_size: multiplicative factor for number of bottle neck layers.\n (i.e. 
bn_size * k features in the bottleneck layer)\n growth_rate: how many filters to add each layer (k in paper).\n dropout_prob: dropout rate after each dense layer.\n \"\"\"\n super(_DenseBlock, self).__init__()\n for i in range(layers):\n layer = _DenseLayer(spatial_dims, in_channels, growth_rate, bn_size, dropout_prob)\n in_channels += growth_rate\n self.add_module(\"denselayer%d\" % (i + 1), layer)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py__Transition__Transition.__init__.self_add_module_pool_p": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py__Transition__Transition.__init__.self_add_module_pool_p", "embedding": null, "metadata": {"file_path": "monai/networks/nets/densenet.py", "file_name": "densenet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 78, "end_line": 95, "span_ids": ["_Transition", "_Transition.__init__"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class _Transition(nn.Sequential):\n def __init__(self, spatial_dims: int, in_channels: int, out_channels: int) -> None:\n \"\"\"\n Args:\n spatial_dims: number of spatial dimensions of the input image.\n in_channels: number of the input channel.\n out_channels: number of the output classes.\n \"\"\"\n super(_Transition, self).__init__()\n\n conv_type: Callable = Conv[Conv.CONV, spatial_dims]\n norm_type: Callable = Norm[Norm.BATCH, spatial_dims]\n pool_type: Callable = Pool[Pool.AVG, spatial_dims]\n\n self.add_module(\"norm\", norm_type(in_channels))\n self.add_module(\"relu\", nn.ReLU(inplace=True))\n self.add_module(\"conv\", conv_type(in_channels, out_channels, kernel_size=1, bias=False))\n self.add_module(\"pool\", pool_type(kernel_size=2, stride=2))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_DenseNet_DenseNet._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_DenseNet_DenseNet._", "embedding": null, "metadata": {"file_path": "monai/networks/nets/densenet.py", "file_name": "densenet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 100, "end_line": 116, "span_ids": ["DenseNet"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DenseNet(nn.Module):\n \"\"\"\n Densenet based on: `Densely Connected Convolutional 
Networks `_.\n Adapted from `PyTorch Hub 2D version\n `_.\n\n Args:\n spatial_dims: number of spatial dimensions of the input image.\n in_channels: number of the input channel.\n out_channels: number of the output classes.\n init_features: number of filters in the first convolution layer.\n growth_rate: how many filters to add each layer (k in paper).\n block_config: how many layers in each pooling block.\n bn_size: multiplicative factor for number of bottle neck layers.\n (i.e. bn_size * k features in the bottleneck layer)\n dropout_prob: dropout rate after each dense layer.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_DenseNet.__init___DenseNet.forward.return.x": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_DenseNet.__init___DenseNet.forward.return.x", "embedding": null, "metadata": {"file_path": "monai/networks/nets/densenet.py", "file_name": "densenet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 120, "end_line": 196, "span_ids": ["DenseNet.__init__", "DenseNet.forward"], "tokens": 703}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DenseNet(nn.Module):\n\n def __init__(\n self,\n spatial_dims: int,\n in_channels: int,\n out_channels: int,\n init_features: int = 64,\n growth_rate: int = 32,\n block_config: Sequence[int] = (6, 12, 24, 16),\n bn_size: int = 4,\n dropout_prob: float = 0.0,\n ) -> None:\n\n super(DenseNet, self).__init__()\n\n conv_type: Type[Union[nn.Conv1d, nn.Conv2d, nn.Conv3d]] = Conv[Conv.CONV, spatial_dims]\n norm_type: Type[Union[nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d]] = Norm[Norm.BATCH, spatial_dims]\n pool_type: Type[Union[nn.MaxPool1d, nn.MaxPool2d, nn.MaxPool3d]] = Pool[Pool.MAX, spatial_dims]\n avg_pool_type: Type[Union[nn.AdaptiveAvgPool1d, nn.AdaptiveAvgPool2d, nn.AdaptiveAvgPool3d]] = Pool[\n Pool.ADAPTIVEAVG, spatial_dims\n ]\n\n self.features = nn.Sequential(\n OrderedDict(\n [\n (\"conv0\", conv_type(in_channels, init_features, kernel_size=7, stride=2, padding=3, bias=False)),\n (\"norm0\", norm_type(init_features)),\n (\"relu0\", nn.ReLU(inplace=True)),\n (\"pool0\", pool_type(kernel_size=3, stride=2, padding=1)),\n ]\n )\n )\n\n in_channels = init_features\n for i, num_layers in enumerate(block_config):\n block = _DenseBlock(\n spatial_dims=spatial_dims,\n layers=num_layers,\n in_channels=in_channels,\n bn_size=bn_size,\n growth_rate=growth_rate,\n dropout_prob=dropout_prob,\n )\n self.features.add_module(f\"denseblock{i + 1}\", block)\n in_channels += num_layers * growth_rate\n if i == len(block_config) - 1:\n self.features.add_module(\"norm5\", norm_type(in_channels))\n else:\n _out_channels = in_channels // 2\n trans = _Transition(spatial_dims, in_channels=in_channels, out_channels=_out_channels)\n self.features.add_module(f\"transition{i + 1}\", trans)\n in_channels = _out_channels\n\n # pooling and classification\n self.class_layers = 
nn.Sequential(\n OrderedDict(\n [\n (\"relu\", nn.ReLU(inplace=True)),\n (\"pool\", avg_pool_type(1)),\n (\"flatten\", nn.Flatten(1)),\n (\"out\", nn.Linear(in_channels, out_channels)),\n ]\n )\n )\n\n for m in self.modules():\n if isinstance(m, conv_type):\n nn.init.kaiming_normal_(torch.as_tensor(m.weight))\n elif isinstance(m, norm_type):\n nn.init.constant_(torch.as_tensor(m.weight), 1)\n nn.init.constant_(torch.as_tensor(m.bias), 0)\n elif isinstance(m, nn.Linear):\n nn.init.constant_(torch.as_tensor(m.bias), 0)\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n x = self.features(x)\n x = self.class_layers(x)\n return x", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/generator.py_from_typing_import_Option_Generator._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/generator.py_from_typing_import_Option_Generator._", "embedding": null, "metadata": {"file_path": "monai/networks/nets/generator.py", "file_name": "generator.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 35, "span_ids": ["Generator", "docstring"], "tokens": 225}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Optional, Sequence, Union\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\n\nfrom monai.networks.blocks import Convolution, ResidualUnit\nfrom monai.networks.layers.factories import Act, Norm\nfrom monai.networks.layers.simplelayers import Reshape\nfrom monai.utils import ensure_tuple, ensure_tuple_rep\n\n\nclass Generator(nn.Module):\n \"\"\"\n Defines a simple generator network accepting a latent vector and through a sequence of convolution layers\n constructs an output tensor of greater size and high dimensionality. 
The method `_get_layer` is used to\n create each of these layers, override this method to define layers beyond the default Convolution or\n ResidualUnit layers.\n\n For example, a generator accepting a latent vector if shape (42,24) and producing an output volume of\n shape (1,64,64) can be constructed as:\n\n gen = Generator((42, 24), (64, 8, 8), (32, 16, 1), (2, 2, 2))\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/generator.py_Generator.__init___Generator.__init__.for_i_c_s_in_enumerat.echannel.c": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/generator.py_Generator.__init___Generator.__init__.for_i_c_s_in_enumerat.echannel.c", "embedding": null, "metadata": {"file_path": "monai/networks/nets/generator.py", "file_name": "generator.py", "file_type": "text/x-python", "category": "implementation", "start_line": 37, "end_line": 97, "span_ids": ["Generator.__init__"], "tokens": 653}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Generator(nn.Module):\n\n def __init__(\n self,\n latent_shape: Sequence[int],\n start_shape: Sequence[int],\n channels: Sequence[int],\n strides: Sequence[int],\n kernel_size: Union[Sequence[int], int] = 3,\n num_res_units: int = 2,\n act=Act.PRELU,\n norm=Norm.INSTANCE,\n dropout: Optional[float] = None,\n bias: bool = True,\n ) -> None:\n \"\"\"\n Construct the generator network with the number of layers defined by `channels` and `strides`. In the\n forward pass a `nn.Linear` layer relates the input latent vector to a tensor of dimensions `start_shape`,\n this is then fed forward through the sequence of convolutional layers. The number of layers is defined by\n the length of `channels` and `strides` which must match, each layer having the number of output channels\n given in `channels` and an upsample factor given in `strides` (ie. 
a transpose convolution with that stride\n size).\n\n Args:\n latent_shape: tuple of integers stating the dimension of the input latent vector (minus batch dimension)\n start_shape: tuple of integers stating the dimension of the tensor to pass to convolution subnetwork\n channels: tuple of integers stating the output channels of each convolutional layer\n strides: tuple of integers stating the stride (upscale factor) of each convolutional layer\n kernel_size: integer or tuple of integers stating size of convolutional kernels\n num_res_units: integer stating number of convolutions in residual units, 0 means no residual units\n act: name or type defining activation layers\n norm: name or type defining normalization layers\n dropout: optional float value in range [0, 1] stating dropout probability for layers, None for no dropout\n bias: boolean stating if convolution layers should have a bias component\n \"\"\"\n super().__init__()\n\n self.in_channels, *self.start_shape = ensure_tuple(start_shape)\n self.dimensions = len(self.start_shape)\n\n self.latent_shape = ensure_tuple(latent_shape)\n self.channels = ensure_tuple(channels)\n self.strides = ensure_tuple(strides)\n self.kernel_size = ensure_tuple_rep(kernel_size, self.dimensions)\n self.num_res_units = num_res_units\n self.act = act\n self.norm = norm\n self.dropout = dropout\n self.bias = bias\n\n self.flatten = nn.Flatten()\n self.linear = nn.Linear(int(np.prod(self.latent_shape)), int(np.prod(start_shape)))\n self.reshape = Reshape(*start_shape)\n self.conv = nn.Sequential()\n\n echannel = self.in_channels\n\n # transform tensor of shape `start_shape' into output shape through transposed convolutions and residual units\n for i, (c, s) in enumerate(zip(channels, strides)):\n is_last = i == len(channels) - 1\n layer = self._get_layer(echannel, c, s, is_last)\n self.conv.add_module(\"layer_%i\" % i, layer)\n echannel = c", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/generator.py_Generator._get_layer_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/generator.py_Generator._get_layer_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/generator.py", "file_name": "generator.py", "file_type": "text/x-python", "category": "implementation", "start_line": 99, "end_line": 148, "span_ids": ["Generator.forward", "Generator._get_layer"], "tokens": 340}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Generator(nn.Module):\n\n def _get_layer(\n self, in_channels: int, out_channels: int, strides: int, is_last: bool\n ) -> Union[Convolution, nn.Sequential]:\n \"\"\"\n Returns a layer accepting inputs with `in_channels` number of channels and producing outputs of `out_channels`\n number of channels. The `strides` indicates upsampling factor, ie. transpose convolutional stride. 
If `is_last`\n is True this is the final layer and is not expected to include activation and normalization layers.\n \"\"\"\n\n layer: Union[Convolution, nn.Sequential]\n\n layer = Convolution(\n in_channels=in_channels,\n strides=strides,\n is_transposed=True,\n conv_only=is_last or self.num_res_units > 0,\n dimensions=self.dimensions,\n out_channels=out_channels,\n kernel_size=self.kernel_size,\n act=self.act,\n norm=self.norm,\n dropout=self.dropout,\n bias=self.bias,\n )\n\n if self.num_res_units > 0:\n ru = ResidualUnit(\n in_channels=out_channels,\n subunits=self.num_res_units,\n last_conv_only=is_last,\n dimensions=self.dimensions,\n out_channels=out_channels,\n kernel_size=self.kernel_size,\n act=self.act,\n norm=self.norm,\n dropout=self.dropout,\n bias=self.bias,\n )\n\n layer = nn.Sequential(layer, ru)\n\n return layer\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n x = self.flatten(x)\n x = self.linear(x)\n x = self.reshape(x)\n x = self.conv(x)\n return x", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/highresnet.py_HighResNet_HighResNet._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/highresnet.py_HighResNet_HighResNet._", "embedding": null, "metadata": {"file_path": "monai/networks/nets/highresnet.py", "file_name": "highresnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 97, "end_line": 123, "span_ids": ["HighResNet"], "tokens": 295}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class HighResNet(nn.Module):\n \"\"\"\n Reimplementation of highres3dnet based on\n Li et al., \"On the compactness, efficiency, and representation of 3D\n convolutional networks: Brain parcellation as a pretext task\", IPMI '17\n\n Adapted from:\n https://github.com/NifTK/NiftyNet/blob/v0.6.0/niftynet/network/highres3dnet.py\n https://github.com/fepegar/highresnet\n\n Args:\n spatial_dims: number of spatial dimensions of the input image.\n in_channels: number of input channels.\n out_channels: number of output channels.\n norm_type: feature normalization type and arguments.\n Defaults to ``(\"batch\", {\"affine\": True})``.\n acti_type: activation type and arguments.\n Defaults to ``(\"relu\", {\"inplace\": True})``.\n dropout_prob: probability of the feature map to be zeroed\n (only applies to the penultimate conv layer).\n layer_params: specifying key parameters of each layer/block.\n channel_matching: {``\"pad\"``, ``\"project\"``}\n Specifies handling residual branch and conv branch channel mismatches. 
Defaults to ``\"pad\"``.\n\n - ``\"pad\"``: with zero padding.\n - ``\"project\"``: with a trainable conv with kernel size one.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/highresnet.py_HighResNet.__init___": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/highresnet.py_HighResNet.__init___", "embedding": null, "metadata": {"file_path": "monai/networks/nets/highresnet.py", "file_name": "highresnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 125, "end_line": 209, "span_ids": ["HighResNet.forward", "HighResNet.__init__"], "tokens": 660}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class HighResNet(nn.Module):\n\n def __init__(\n self,\n spatial_dims: int = 3,\n in_channels: int = 1,\n out_channels: int = 1,\n norm_type: Union[str, tuple] = (\"batch\", {\"affine\": True}),\n acti_type: Union[str, tuple] = (\"relu\", {\"inplace\": True}),\n dropout_prob: Optional[Union[Tuple, str, float]] = 0.0,\n layer_params: Sequence[Dict] = DEFAULT_LAYER_PARAMS_3D,\n channel_matching: Union[ChannelMatching, str] = ChannelMatching.PAD,\n ) -> None:\n\n super(HighResNet, self).__init__()\n blocks = nn.ModuleList()\n\n # initial conv layer\n params = layer_params[0]\n _in_chns, _out_chns = in_channels, params[\"n_features\"]\n blocks.append(\n Convolution(\n dimensions=spatial_dims,\n in_channels=_in_chns,\n out_channels=_out_chns,\n kernel_size=params[\"kernel_size\"],\n adn_ordering=\"NA\",\n act=acti_type,\n norm=norm_type,\n )\n )\n\n # residual blocks\n for (idx, params) in enumerate(layer_params[1:-2]): # res blocks except the 1st and last two conv layers.\n _in_chns, _out_chns = _out_chns, params[\"n_features\"]\n _dilation = 2 ** idx\n for _ in range(params[\"repeat\"]):\n blocks.append(\n HighResBlock(\n spatial_dims=spatial_dims,\n in_channels=_in_chns,\n out_channels=_out_chns,\n kernels=params[\"kernels\"],\n dilation=_dilation,\n norm_type=norm_type,\n acti_type=acti_type,\n channel_matching=channel_matching,\n )\n )\n _in_chns = _out_chns\n\n # final conv layers\n params = layer_params[-2]\n _in_chns, _out_chns = _out_chns, params[\"n_features\"]\n blocks.append(\n Convolution(\n dimensions=spatial_dims,\n in_channels=_in_chns,\n out_channels=_out_chns,\n kernel_size=params[\"kernel_size\"],\n adn_ordering=\"NAD\",\n act=acti_type,\n norm=norm_type,\n dropout=dropout_prob,\n )\n )\n\n params = layer_params[-1]\n _in_chns = _out_chns\n blocks.append(\n Convolution(\n dimensions=spatial_dims,\n in_channels=_in_chns,\n out_channels=out_channels,\n kernel_size=params[\"kernel_size\"],\n adn_ordering=\"NAD\",\n act=acti_type,\n norm=norm_type,\n dropout=dropout_prob,\n )\n )\n\n self.blocks = nn.Sequential(*blocks)\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n return torch.as_tensor(self.blocks(x))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regressor.py_from_typing_import_Option_Regressor.__init__.self.final.self__get_final_layer_ec": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regressor.py_from_typing_import_Option_Regressor.__init__.self.final.self__get_final_layer_ec", "embedding": null, "metadata": {"file_path": "monai/networks/nets/regressor.py", "file_name": "regressor.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 91, "span_ids": ["Regressor", "Regressor.__init__", "docstring"], "tokens": 774}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Optional, Sequence, Union\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\n\nfrom monai.networks.blocks import Convolution, ResidualUnit\nfrom monai.networks.layers.convutils import calculate_out_shape, same_padding\nfrom monai.networks.layers.factories import Act, Norm\nfrom monai.networks.layers.simplelayers import Reshape\nfrom monai.utils import ensure_tuple, ensure_tuple_rep\n\n\nclass Regressor(nn.Module):\n \"\"\"\n This defines a network for relating large-sized input tensors to small output tensors, ie. regressing large\n values to a prediction. An output of a single dimension can be used as value regression or multi-label\n classification prediction, an output of a single value can be used as a discriminator or critic prediction.\n \"\"\"\n\n def __init__(\n self,\n in_shape: Sequence[int],\n out_shape: Sequence[int],\n channels: Sequence[int],\n strides: Sequence[int],\n kernel_size: Union[Sequence[int], int] = 3,\n num_res_units: int = 2,\n act=Act.PRELU,\n norm=Norm.INSTANCE,\n dropout: Optional[float] = None,\n bias: bool = True,\n ) -> None:\n \"\"\"\n Construct the regressor network with the number of layers defined by `channels` and `strides`. 
Inputs are\n first passed through the convolutional layers in the forward pass, the output from this is then pass\n through a fully connected layer to relate them to the final output tensor.\n\n Args:\n in_shape: tuple of integers stating the dimension of the input tensor (minus batch dimension)\n out_shape: tuple of integers stating the dimension of the final output tensor\n channels: tuple of integers stating the output channels of each convolutional layer\n strides: tuple of integers stating the stride (downscale factor) of each convolutional layer\n kernel_size: integer or tuple of integers stating size of convolutional kernels\n num_res_units: integer stating number of convolutions in residual units, 0 means no residual units\n act: name or type defining activation layers\n norm: name or type defining normalization layers\n dropout: optional float value in range [0, 1] stating dropout probability for layers, None for no dropout\n bias: boolean stating if convolution layers should have a bias component\n \"\"\"\n super().__init__()\n\n self.in_channels, *self.in_shape = ensure_tuple(in_shape)\n self.dimensions = len(self.in_shape)\n self.channels = ensure_tuple(channels)\n self.strides = ensure_tuple(strides)\n self.out_shape = ensure_tuple(out_shape)\n self.kernel_size = ensure_tuple_rep(kernel_size, self.dimensions)\n self.num_res_units = num_res_units\n self.act = act\n self.norm = norm\n self.dropout = dropout\n self.bias = bias\n self.net = nn.Sequential()\n\n echannel = self.in_channels\n\n padding = same_padding(kernel_size)\n\n self.final_size = np.asarray(self.in_shape, dtype=int)\n self.reshape = Reshape(*self.out_shape)\n\n # encode stage\n for i, (c, s) in enumerate(zip(self.channels, self.strides)):\n layer = self._get_layer(echannel, c, s, i == len(channels) - 1)\n echannel = c # use the output channel number as the input for the next loop\n self.net.add_module(\"layer_%i\" % i, layer)\n self.final_size = calculate_out_shape(self.final_size, kernel_size, s, padding) # type: ignore\n\n self.final = self._get_final_layer((echannel,) + self.final_size)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regressor.py_Regressor._get_layer_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/regressor.py_Regressor._get_layer_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/regressor.py", "file_name": "regressor.py", "file_type": "text/x-python", "category": "implementation", "start_line": 93, "end_line": 143, "span_ids": ["Regressor.forward", "Regressor._get_final_layer", "Regressor._get_layer"], "tokens": 363}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Regressor(nn.Module):\n\n def _get_layer(\n self, in_channels: int, out_channels: int, strides: int, is_last: bool\n ) -> Union[ResidualUnit, Convolution]:\n \"\"\"\n Returns a layer accepting inputs with `in_channels` number of channels and producing outputs of 
`out_channels`\n number of channels. The `strides` indicates downsampling factor, ie. convolutional stride. If `is_last`\n is True this is the final layer and is not expected to include activation and normalization layers.\n \"\"\"\n\n layer: Union[ResidualUnit, Convolution]\n\n if self.num_res_units > 0:\n layer = ResidualUnit(\n subunits=self.num_res_units,\n last_conv_only=is_last,\n dimensions=self.dimensions,\n in_channels=in_channels,\n out_channels=out_channels,\n strides=strides,\n kernel_size=self.kernel_size,\n act=self.act,\n norm=self.norm,\n dropout=self.dropout,\n bias=self.bias,\n )\n else:\n layer = Convolution(\n conv_only=is_last,\n dimensions=self.dimensions,\n in_channels=in_channels,\n out_channels=out_channels,\n strides=strides,\n kernel_size=self.kernel_size,\n act=self.act,\n norm=self.norm,\n dropout=self.dropout,\n bias=self.bias,\n )\n\n return layer\n\n def _get_final_layer(self, in_shape: Sequence[int]):\n linear = nn.Linear(int(np.product(in_shape)), int(np.product(self.out_shape)))\n return nn.Sequential(nn.Flatten(), linear)\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n x = self.net(x)\n x = self.final(x)\n x = self.reshape(x)\n return x", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/unet.py_UNet._get_down_layer_UNet._get_bottom_layer.return.self__get_down_layer_in_c": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/unet.py_UNet._get_down_layer_UNet._get_bottom_layer.return.self__get_down_layer_in_c", "embedding": null, "metadata": {"file_path": "monai/networks/nets/unet.py", "file_name": "unet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 109, "end_line": 146, "span_ids": ["UNet._get_down_layer", "UNet._get_bottom_layer"], "tokens": 272}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@export(\"monai.networks.nets\")\n@alias(\"Unet\")\nclass UNet(nn.Module):\n\n def _get_down_layer(self, in_channels: int, out_channels: int, strides: int, is_top: bool) -> nn.Module:\n \"\"\"\n Args:\n in_channels: number of input channels.\n out_channels: number of output channels.\n strides: convolution stride.\n is_top: True if this is the top block.\n \"\"\"\n if self.num_res_units > 0:\n return ResidualUnit(\n self.dimensions,\n in_channels,\n out_channels,\n strides=strides,\n kernel_size=self.kernel_size,\n subunits=self.num_res_units,\n act=self.act,\n norm=self.norm,\n dropout=self.dropout,\n )\n return Convolution(\n self.dimensions,\n in_channels,\n out_channels,\n strides=strides,\n kernel_size=self.kernel_size,\n act=self.act,\n norm=self.norm,\n dropout=self.dropout,\n )\n\n def _get_bottom_layer(self, in_channels: int, out_channels: int) -> nn.Module:\n \"\"\"\n Args:\n in_channels: number of input channels.\n out_channels: number of output channels.\n \"\"\"\n return self._get_down_layer(in_channels, out_channels, 1, False)", "start_char_idx": null, "end_char_idx": null, 
"text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/unet.py_UNet._get_up_layer_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/unet.py_UNet._get_up_layer_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/unet.py", "file_name": "unet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 148, "end_line": 194, "span_ids": ["impl:3", "UNet.forward", "UNet._get_up_layer"], "tokens": 292}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@export(\"monai.networks.nets\")\n@alias(\"Unet\")\nclass UNet(nn.Module):\n\n def _get_up_layer(self, in_channels: int, out_channels: int, strides: int, is_top: bool) -> nn.Module:\n \"\"\"\n Args:\n in_channels: number of input channels.\n out_channels: number of output channels.\n strides: convolution stride.\n is_top: True if this is the top block.\n \"\"\"\n conv: Union[Convolution, nn.Sequential]\n\n conv = Convolution(\n self.dimensions,\n in_channels,\n out_channels,\n strides=strides,\n kernel_size=self.up_kernel_size,\n act=self.act,\n norm=self.norm,\n dropout=self.dropout,\n conv_only=is_top and self.num_res_units == 0,\n is_transposed=True,\n )\n\n if self.num_res_units > 0:\n ru = ResidualUnit(\n self.dimensions,\n out_channels,\n out_channels,\n strides=1,\n kernel_size=self.kernel_size,\n subunits=1,\n act=self.act,\n norm=self.norm,\n dropout=self.dropout,\n last_conv_only=is_top,\n )\n conv = nn.Sequential(conv, ru)\n\n return conv\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n x = self.model(x)\n return x\n\n\nUnet = unet = UNet", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_warnings_one_hot.return.labels": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_warnings_one_hot.return.labels", "embedding": null, "metadata": {"file_path": "monai/networks/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 15, "end_line": 64, "span_ids": ["one_hot", "docstring"], "tokens": 386}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import warnings\nfrom contextlib import contextmanager\nfrom typing import Any, Callable, Optional, Sequence\n\nimport torch\nimport torch.nn as nn\n\n__all__ = [\n \"one_hot\",\n \"slice_channels\",\n \"predict_segmentation\",\n \"normalize_transform\",\n \"to_norm_affine\",\n \"normal_init\",\n 
\"icnr_init\",\n \"pixelshuffle\",\n \"eval_mode\",\n \"train_mode\",\n]\n\n\ndef one_hot(labels: torch.Tensor, num_classes: int, dtype: torch.dtype = torch.float, dim: int = 1) -> torch.Tensor:\n \"\"\"\n For a tensor `labels` of dimensions B1[spatial_dims], return a tensor of dimensions `BN[spatial_dims]`\n for `num_classes` N number of classes.\n\n Example:\n\n For every value v = labels[b,1,h,w], the value in the result at [b,v,h,w] will be 1 and all others 0.\n Note that this will include the background label, thus a binary mask should be treated as having 2 classes.\n \"\"\"\n if labels.dim() <= 0:\n raise AssertionError(\"labels should have dim of 1 or more.\")\n\n # if `dim` is bigger, add singleton dim at the end\n if labels.ndim < dim + 1:\n shape = list(labels.shape) + [1] * (dim + 1 - len(labels.shape))\n labels = torch.reshape(labels, shape)\n\n sh = list(labels.shape)\n\n if sh[dim] != 1:\n raise AssertionError(\"labels should have a channel with length equal to one.\")\n\n sh[dim] = num_classes\n\n o = torch.zeros(size=sh, dtype=dtype, device=labels.device)\n labels = o.scatter_(dim=dim, index=labels.long(), value=1)\n\n return labels", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_normalize_transform_normalize_transform.return.norm": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_normalize_transform_normalize_transform.return.norm", "embedding": null, "metadata": {"file_path": "monai/networks/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 82, "end_line": 114, "span_ids": ["normalize_transform"], "tokens": 372}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def normalize_transform(\n shape: Sequence[int],\n device: Optional[torch.device] = None,\n dtype: Optional[torch.dtype] = None,\n align_corners: bool = False,\n) -> torch.Tensor:\n \"\"\"\n Compute an affine matrix according to the input shape.\n The transform normalizes the homogeneous image coordinates to the\n range of `[-1, 1]`.\n\n Args:\n shape: input spatial shape\n device: device on which the returned affine will be allocated.\n dtype: data type of the returned affine\n align_corners: if True, consider -1 and 1 to refer to the centers of the\n corner pixels rather than the image corners.\n See also: https://pytorch.org/docs/stable/nn.functional.html#torch.nn.functional.grid_sample\n \"\"\"\n norm = torch.tensor(shape, dtype=torch.float64, device=device) # no in-place change\n if align_corners:\n norm[norm <= 1.0] = 2.0\n norm = 2.0 / (norm - 1.0)\n norm = torch.diag(torch.cat((norm, torch.ones((1,), dtype=torch.float64, device=device))))\n norm[:-1, -1] = -1.0\n else:\n norm[norm <= 0.0] = 2.0\n norm = 2.0 / norm\n norm = torch.diag(torch.cat((norm, torch.ones((1,), dtype=torch.float64, device=device))))\n norm[:-1, -1] = 1.0 / torch.tensor(shape, dtype=torch.float64, device=device) - 1.0\n norm = 
norm.unsqueeze(0).to(dtype=dtype)\n norm.requires_grad = False\n return norm", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/adaptors.py_adaptor._inner_adaptor.return._inner": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/adaptors.py_adaptor._inner_adaptor.return._inner", "embedding": null, "metadata": {"file_path": "monai/transforms/adaptors.py", "file_name": "adaptors.py", "file_type": "text/x-python", "category": "implementation", "start_line": 149, "end_line": 214, "span_ids": ["adaptor"], "tokens": 569}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@_monai_export(\"monai.transforms\")\ndef adaptor(function, outputs, inputs=None):\n # ... other code\n\n def _inner(ditems):\n\n sig = FunctionSignature(function)\n\n if sig.found_kwargs:\n must_be_types_or_none(\"inputs\", inputs, (dict,))\n # we just forward all arguments unless we have been provided an input map\n if inputs is None:\n dinputs = dict(ditems)\n else:\n # dict\n dinputs = map_names(ditems, inputs)\n\n else:\n # no **kwargs\n # select only items from the method signature\n dinputs = {k: v for k, v in ditems.items() if k in sig.non_var_parameters}\n must_be_types_or_none(\"inputs\", inputs, (str, list, tuple, dict))\n if inputs is None:\n pass\n elif isinstance(inputs, str):\n if len(sig.non_var_parameters) != 1:\n raise ValueError(\"if 'inputs' is a string, function may only have a single non-variadic parameter\")\n dinputs = {inputs: ditems[inputs]}\n elif isinstance(inputs, (list, tuple)):\n dinputs = {k: dinputs[k] for k in inputs}\n else:\n # dict\n dinputs = map_only_names(ditems, inputs)\n\n ret = function(**dinputs)\n\n # now the mapping back to the output dictionary depends on outputs and what was returned from the function\n op = outputs\n if isinstance(ret, dict):\n must_be_types_or_none(\"outputs\", op, (dict,))\n if op is not None:\n ret = {v: ret[k] for k, v in op.items()}\n elif isinstance(ret, (list, tuple)):\n if len(ret) == 1:\n must_be_types(\"outputs\", op, (str, list, tuple))\n else:\n must_be_types(\"outputs\", op, (list, tuple))\n\n if isinstance(op, str):\n op = [op]\n\n if len(ret) != len(outputs):\n raise ValueError(\"'outputs' must have the same length as the number of elements that were returned\")\n\n ret = dict(zip(op, ret))\n else:\n must_be_types(\"outputs\", op, (str, list, tuple))\n if isinstance(op, (list, tuple)):\n if len(op) != 1:\n raise ValueError(\"'outputs' must be of length one if it is a list or tuple\")\n op = op[0]\n ret = {op: ret}\n\n ditems = dict(ditems)\n for k, v in ret.items():\n ditems[k] = v\n\n return ditems\n\n return _inner", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/adaptors.py_apply_alias_to_kwargs.return._inner": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/adaptors.py_apply_alias_to_kwargs.return._inner", "embedding": null, "metadata": {"file_path": "monai/transforms/adaptors.py", "file_name": "adaptors.py", "file_type": "text/x-python", "category": "implementation", "start_line": 213, "end_line": 239, "span_ids": ["apply_alias", "to_kwargs"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@_monai_export(\"monai.transforms\")\ndef apply_alias(fn, name_map):\n def _inner(data):\n\n # map names\n pre_call = dict(data)\n for _from, _to in name_map.items():\n pre_call[_to] = pre_call.pop(_from)\n\n # execute\n post_call = fn(pre_call)\n\n # map names back\n for _from, _to in name_map.items():\n post_call[_from] = post_call.pop(_to)\n\n return post_call\n\n return _inner\n\n\n@_monai_export(\"monai.transforms\")\ndef to_kwargs(fn):\n def _inner(data):\n return fn(**data)\n\n return _inner", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/adaptors.py_FunctionSignature_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/adaptors.py_FunctionSignature_", "embedding": null, "metadata": {"file_path": "monai/transforms/adaptors.py", "file_name": "adaptors.py", "file_type": "text/x-python", "category": "implementation", "start_line": 244, "end_line": 268, "span_ids": ["FunctionSignature.__repr__", "FunctionSignature", "FunctionSignature.__str__", "FunctionSignature.__init__"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FunctionSignature:\n def __init__(self, function: Callable) -> None:\n import inspect\n\n sfn = inspect.signature(function)\n self.found_args = False\n self.found_kwargs = False\n self.defaults = {}\n self.non_var_parameters = set()\n for p in sfn.parameters.values():\n if p.kind is inspect.Parameter.VAR_POSITIONAL:\n self.found_args = True\n if p.kind is inspect.Parameter.VAR_KEYWORD:\n self.found_kwargs = True\n else:\n self.non_var_parameters.add(p.name)\n self.defaults[p.name] = p.default is not p.empty\n\n def __repr__(self) -> str:\n s = \" str:\n return self.__repr__()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/compose.py_Compose_Compose.__init__.self_set_random_state_see": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/compose.py_Compose_Compose.__init__.self_set_random_state_see", "embedding": null, "metadata": {"file_path": "monai/transforms/compose.py", "file_name": "compose.py", "file_type": "text/x-python", "category": "implementation", "start_line": 35, "end_line": 100, "span_ids": ["Compose.__init__", "Compose"], "tokens": 724}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Compose(Randomizable, InvertibleTransform):\n \"\"\"\n ``Compose`` provides the ability to chain a series of calls together in a\n sequence. Each transform in the sequence must take a single argument and\n return a single value, so that the transforms can be called in a chain.\n\n ``Compose`` can be used in two ways:\n\n #. With a series of transforms that accept and return a single\n ndarray / tensor / tensor-like parameter.\n #. With a series of transforms that accept and return a dictionary that\n contains one or more parameters. Such transforms must have pass-through\n semantics; unused values in the dictionary must be copied to the return\n dictionary. It is required that the dictionary is copied between input\n and output of each transform.\n\n If some transform generates a list batch of data in the transform chain,\n every item in the list is still a dictionary, and all the following\n transforms will apply to every item of the list, for example:\n\n #. transformA normalizes the intensity of 'img' field in the dict data.\n #. transformB crops out a list batch of images on 'img' and 'seg' field.\n And constructs a list of dict data, other fields are copied::\n\n { [{ {\n 'img': [1, 2], 'img': [1], 'img': [2],\n 'seg': [1, 2], 'seg': [1], 'seg': [2],\n 'extra': 123, --> 'extra': 123, 'extra': 123,\n 'shape': 'CHWD' 'shape': 'CHWD' 'shape': 'CHWD'\n } }, }]\n\n #. transformC then randomly rotates or flips 'img' and 'seg' fields of\n every dictionary item in the list.\n\n The composed transforms will be set the same global random seed if user called\n `set_determinism()`.\n\n When using the pass-through dictionary operation, you can make use of\n :class:`monai.transforms.adaptors.adaptor` to wrap transforms that don't conform\n to the requirements. This approach allows you to use transforms from\n otherwise incompatible libraries with minimal additional work.\n\n Note:\n\n In many cases, Compose is not the best way to create pre-processing\n pipelines. Pre-processing is often not a strictly sequential series of\n operations, and much of the complexity arises when a not-sequential\n set of functions must be called as if it were a sequence.\n\n Example: images and labels\n Images typically require some kind of normalization that labels do not.\n Both are then typically augmented through the use of random rotations,\n flips, and deformations.\n Compose can be used with a series of transforms that take a dictionary\n that contains 'image' and 'label' entries. 
This might require wrapping\n `torchvision` transforms before passing them to compose.\n Alternatively, one can create a class with a `__call__` function that\n calls your pre-processing functions taking into account that not all of\n them are called on the labels.\n \"\"\"\n\n def __init__(self, transforms: Optional[Union[Sequence[Callable], Callable]] = None) -> None:\n if transforms is None:\n transforms = []\n self.transforms = ensure_tuple(transforms)\n self.set_random_state(seed=get_seed())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/__init__.py__": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/__init__.py__", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 11, "end_line": 11, "span_ids": [], "tokens": 0}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_BorderPad_BorderPad.__init__.self.mode.NumpyPadMode_mode_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_BorderPad_BorderPad.__init__.self.mode.NumpyPadMode_mode_", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 104, "end_line": 130, "span_ids": ["BorderPad.__init__", "BorderPad"], "tokens": 422}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BorderPad(Transform):\n \"\"\"\n Pad the input data by adding specified borders to every dimension.\n\n Args:\n spatial_border: specified size for every spatial border. Any -ve values will be set to 0. 
It can be 3 shapes:\n\n - single int number, pad all the borders with the same size.\n - length equals the length of image shape, pad every spatial dimension separately.\n for example, image shape(CHW) is [1, 4, 4], spatial_border is [2, 1],\n pad every border of H dim with 2, pad every border of W dim with 1, result shape is [1, 8, 6].\n - length equals 2 x (length of image shape), pad every border of every dimension separately.\n for example, image shape(CHW) is [1, 4, 4], spatial_border is [1, 2, 3, 4], pad top of H dim with 1,\n pad bottom of H dim with 2, pad left of W dim with 3, pad right of W dim with 4.\n the result shape is [1, 7, 11].\n\n mode: {``\"constant\"``, ``\"edge\"``, ``\"linear_ramp\"``, ``\"maximum\"``, ``\"mean\"``,\n ``\"median\"``, ``\"minimum\"``, ``\"reflect\"``, ``\"symmetric\"``, ``\"wrap\"``, ``\"empty\"``}\n One of the listed string values or a user supplied function. Defaults to ``\"constant\"``.\n See also: https://numpy.org/doc/1.18/reference/generated/numpy.pad.html\n \"\"\"\n\n def __init__(\n self, spatial_border: Union[Sequence[int], int], mode: Union[NumpyPadMode, str] = NumpyPadMode.CONSTANT\n ) -> None:\n self.spatial_border = spatial_border\n self.mode: NumpyPadMode = NumpyPadMode(mode)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_BorderPad.__call___BorderPad.__call__.return.np_pad_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_BorderPad.__call___BorderPad.__call__.return.np_pad_", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 132, "end_line": 168, "span_ids": ["BorderPad.__call__"], "tokens": 514}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BorderPad(Transform):\n\n def __call__(self, img: np.ndarray, mode: Optional[Union[NumpyPadMode, str]] = None):\n \"\"\"\n Args:\n img: data to be transformed, assuming `img` is channel-first and\n padding doesn't apply to the channel dim.\n mode: {``\"constant\"``, ``\"edge\"``, ``\"linear_ramp\"``, ``\"maximum\"``, ``\"mean\"``,\n ``\"median\"``, ``\"minimum\"``, ``\"reflect\"``, ``\"symmetric\"``, ``\"wrap\"``, ``\"empty\"``}\n One of the listed string values or a user supplied function. 
Defaults to ``self.mode``.\n See also: https://numpy.org/doc/1.18/reference/generated/numpy.pad.html\n\n Raises:\n ValueError: When ``self.spatial_border`` does not contain ints.\n ValueError: When ``self.spatial_border`` length is not one of\n [1, len(spatial_shape), 2*len(spatial_shape)].\n\n \"\"\"\n spatial_shape = img.shape[1:]\n spatial_border = ensure_tuple(self.spatial_border)\n if not all(isinstance(b, int) for b in spatial_border):\n raise ValueError(f\"self.spatial_border must contain only ints, got {spatial_border}.\")\n spatial_border = tuple(max(0, b) for b in spatial_border)\n\n if len(spatial_border) == 1:\n data_pad_width = [(spatial_border[0], spatial_border[0]) for _ in range(len(spatial_shape))]\n elif len(spatial_border) == len(spatial_shape):\n data_pad_width = [(spatial_border[i], spatial_border[i]) for i in range(len(spatial_shape))]\n elif len(spatial_border) == len(spatial_shape) * 2:\n data_pad_width = [(spatial_border[2 * i], spatial_border[2 * i + 1]) for i in range(len(spatial_shape))]\n else:\n raise ValueError(\n f\"Unsupported spatial_border length: {len(spatial_border)}, available options are \"\n f\"[1, len(spatial_shape)={len(spatial_shape)}, 2*len(spatial_shape)={2*len(spatial_shape)}].\"\n )\n\n return np.pad(\n img, [(0, 0)] + data_pad_width, mode=self.mode.value if mode is None else NumpyPadMode(mode).value\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_DivisiblePad_DivisiblePad.__init__.self.mode.NumpyPadMode_mode_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_DivisiblePad_DivisiblePad.__init__.self.mode.NumpyPadMode_mode_", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 147, "end_line": 166, "span_ids": ["DivisiblePad.__init__", "DivisiblePad"], "tokens": 263}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DivisiblePad(Transform):\n \"\"\"\n Pad the input data, so that the spatial sizes are divisible by `k`.\n \"\"\"\n\n def __init__(self, k: Union[Sequence[int], int], mode: Union[NumpyPadMode, str] = NumpyPadMode.CONSTANT) -> None:\n \"\"\"\n Args:\n k: the target k for each spatial dimension.\n if `k` is negative or 0, the original size is preserved.\n if `k` is an int, the same `k` be applied to all the input spatial dimensions.\n mode: {``\"constant\"``, ``\"edge\"``, ``\"linear_ramp\"``, ``\"maximum\"``, ``\"mean\"``,\n ``\"median\"``, ``\"minimum\"``, ``\"reflect\"``, ``\"symmetric\"``, ``\"wrap\"``, ``\"empty\"``}\n One of the listed string values or a user supplied function. 
Defaults to ``\"constant\"``.\n See also: https://numpy.org/doc/1.18/reference/generated/numpy.pad.html\n\n See also :py:class:`monai.transforms.SpatialPad`\n \"\"\"\n self.k = k\n self.mode: NumpyPadMode = NumpyPadMode(mode)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_DivisiblePad.__call___DivisiblePad.__call__.return.spatial_pad_img_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_DivisiblePad.__call___DivisiblePad.__call__.return.spatial_pad_img_", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 173, "end_line": 191, "span_ids": ["DivisiblePad.__call__"], "tokens": 286}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DivisiblePad(Transform):\n\n def __call__(self, img: np.ndarray, mode: Optional[Union[NumpyPadMode, str]] = None) -> np.ndarray:\n \"\"\"\n Args:\n img: data to be transformed, assuming `img` is channel-first\n and padding doesn't apply to the channel dim.\n mode: {``\"constant\"``, ``\"edge\"``, ``\"linear_ramp\"``, ``\"maximum\"``, ``\"mean\"``,\n ``\"median\"``, ``\"minimum\"``, ``\"reflect\"``, ``\"symmetric\"``, ``\"wrap\"``, ``\"empty\"``}\n One of the listed string values or a user supplied function. 
Defaults to ``self.mode``.\n See also: https://numpy.org/doc/1.18/reference/generated/numpy.pad.html\n \"\"\"\n spatial_shape = img.shape[1:]\n k = fall_back_tuple(self.k, (1,) * len(spatial_shape))\n new_size = []\n for k_d, dim in zip(k, spatial_shape):\n new_dim = int(np.ceil(dim / k_d) * k_d) if k_d > 0 else dim\n new_size.append(new_dim)\n\n spatial_pad = SpatialPad(spatial_size=new_size, method=Method.SYMMETRIC, mode=mode or self.mode)\n return spatial_pad(img)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_CenterSpatialCrop_CenterSpatialCrop.__call__.return.cropper_img_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_CenterSpatialCrop_CenterSpatialCrop.__call__.return.cropper_img_", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 271, "end_line": 291, "span_ids": ["CenterSpatialCrop", "CenterSpatialCrop.__call__", "CenterSpatialCrop.__init__"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CenterSpatialCrop(Transform):\n \"\"\"\n Crop at the center of image with specified ROI size.\n\n Args:\n roi_size: the spatial size of the crop region e.g. 
[224,224,128]\n If its components have non-positive values, the corresponding size of input image will be used.\n \"\"\"\n\n def __init__(self, roi_size: Union[Sequence[int], int]) -> None:\n self.roi_size = roi_size\n\n def __call__(self, img: np.ndarray):\n \"\"\"\n Apply the transform to `img`, assuming `img` is channel-first and\n slicing doesn't apply to the channel dim.\n \"\"\"\n self.roi_size = fall_back_tuple(self.roi_size, img.shape[1:])\n center = [i // 2 for i in img.shape[1:]]\n cropper = SpatialCrop(roi_center=center, roi_size=self.roi_size)\n return cropper(img)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandSpatialCrop_RandSpatialCrop.__init__.self._slices.None": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandSpatialCrop_RandSpatialCrop.__init__.self._slices.None", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 294, "end_line": 315, "span_ids": ["RandSpatialCrop.__init__", "RandSpatialCrop"], "tokens": 261}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandSpatialCrop(Randomizable):\n \"\"\"\n Crop image with random size or specific size ROI. It can crop at a random position as center\n or at the image center. And allows to set the minimum size to limit the randomly generated ROI.\n\n Args:\n roi_size: if `random_size` is True, it specifies the minimum crop region.\n if `random_size` is False, it specifies the expected ROI size to crop. e.g. 
[224, 224, 128]\n If its components have non-positive values, the corresponding size of input image will be used.\n random_center: crop at random position as center or the image center.\n random_size: crop with random size or specific size ROI.\n The actual size is sampled from `randint(roi_size, img_size)`.\n \"\"\"\n\n def __init__(\n self, roi_size: Union[Sequence[int], int], random_center: bool = True, random_size: bool = True\n ) -> None:\n self.roi_size = roi_size\n self.random_center = random_center\n self.random_size = random_size\n self._size: Optional[Sequence[int]] = None\n self._slices: Optional[Tuple[slice, ...]] = None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandSpatialCropSamples_RandSpatialCropSamples.__call__.return._self_cropper_img_for___": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandSpatialCropSamples_RandSpatialCropSamples.__call__.return._self_cropper_img_for___", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 339, "end_line": 386, "span_ids": ["RandSpatialCropSamples", "RandSpatialCropSamples.__call__", "RandSpatialCropSamples.__init__", "RandSpatialCropSamples.set_random_state", "RandSpatialCropSamples.randomize"], "tokens": 451}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandSpatialCropSamples(Randomizable):\n \"\"\"\n Crop image with random size or specific size ROI to generate a list of N samples.\n It can crop at a random position as center or at the image center. And allows to set\n the minimum size to limit the randomly generated ROI.\n It will return a list of cropped images.\n\n Args:\n roi_size: if `random_size` is True, the spatial size of the minimum crop region.\n if `random_size` is False, specify the expected ROI size to crop. e.g. 
[224, 224, 128]\n num_samples: number of samples (crop regions) to take in the returned list.\n random_center: crop at random position as center or the image center.\n random_size: crop with random size or specific size ROI.\n The actual size is sampled from `randint(roi_size, img_size)`.\n\n Raises:\n ValueError: When ``num_samples`` is nonpositive.\n\n \"\"\"\n\n def __init__(\n self,\n roi_size: Union[Sequence[int], int],\n num_samples: int,\n random_center: bool = True,\n random_size: bool = True,\n ) -> None:\n if num_samples < 1:\n raise ValueError(f\"num_samples must be positive, got {num_samples}.\")\n self.num_samples = num_samples\n self.cropper = RandSpatialCrop(roi_size, random_center, random_size)\n\n def set_random_state(\n self, seed: Optional[int] = None, state: Optional[np.random.RandomState] = None\n ) -> \"Randomizable\":\n super().set_random_state(seed=seed, state=state)\n self.cropper.set_random_state(state=self.R)\n return self\n\n def randomize(self, data: Optional[Any] = None) -> None:\n pass\n\n def __call__(self, img: np.ndarray) -> List[np.ndarray]:\n \"\"\"\n Apply the transform to `img`, assuming `img` is channel-first and\n cropping doesn't change the channel dim.\n \"\"\"\n return [self.cropper(img) for _ in range(self.num_samples)]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_SpatialPadd_SpatialPadd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_SpatialPadd_SpatialPadd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 92, "end_line": 131, "span_ids": ["SpatialPadd", "SpatialPadd.__init__", "SpatialPadd.__call__"], "tokens": 495}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SpatialPadd(MapTransform, InvertibleTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.SpatialPad`.\n Performs padding to the data, symmetric for all sides or all on one side for each dimension.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n spatial_size: Union[Sequence[int], int],\n method: Union[Method, str] = Method.SYMMETRIC,\n mode: NumpyPadModeSequence = NumpyPadMode.CONSTANT,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n spatial_size: the spatial size of output data after padding.\n If its components have non-positive values, the corresponding size of input image will be used.\n method: {``\"symmetric\"``, ``\"end\"``}\n Pad image symmetric on every side or only pad at the end sides. 
Defaults to ``\"symmetric\"``.\n mode: {``\"constant\"``, ``\"edge\"``, ``\"linear_ramp\"``, ``\"maximum\"``, ``\"mean\"``,\n ``\"median\"``, ``\"minimum\"``, ``\"reflect\"``, ``\"symmetric\"``, ``\"wrap\"``, ``\"empty\"``}\n One of the listed string values or a user supplied function. Defaults to ``\"constant\"``.\n See also: https://numpy.org/doc/1.18/reference/generated/numpy.pad.html\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.mode = ensure_tuple_rep(mode, len(self.keys))\n self.padder = SpatialPad(spatial_size, method)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key, m in self.key_iterator(d, self.mode):\n self.push_transform(d, key, extra_info={\"mode\": m.value if isinstance(m, Enum) else m})\n d[key] = self.padder(d[key], mode=m)\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_BorderPadd_BorderPadd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_BorderPadd_BorderPadd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 154, "end_line": 199, "span_ids": ["BorderPadd.__init__", "BorderPadd.__call__", "BorderPadd"], "tokens": 616}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class BorderPadd(MapTransform, InvertibleTransform):\n \"\"\"\n Pad the input data by adding specified borders to every dimension.\n Dictionary-based wrapper of :py:class:`monai.transforms.BorderPad`.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n spatial_border: Union[Sequence[int], int],\n mode: NumpyPadModeSequence = NumpyPadMode.CONSTANT,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n spatial_border: specified size for every spatial border. 
it can be 3 shapes:\n\n - single int number, pad all the borders with the same size.\n - length equals the length of image shape, pad every spatial dimension separately.\n for example, image shape(CHW) is [1, 4, 4], spatial_border is [2, 1],\n pad every border of H dim with 2, pad every border of W dim with 1, result shape is [1, 8, 6].\n - length equals 2 x (length of image shape), pad every border of every dimension separately.\n for example, image shape(CHW) is [1, 4, 4], spatial_border is [1, 2, 3, 4], pad top of H dim with 1,\n pad bottom of H dim with 2, pad left of W dim with 3, pad right of W dim with 4.\n the result shape is [1, 7, 11].\n\n mode: {``\"constant\"``, ``\"edge\"``, ``\"linear_ramp\"``, ``\"maximum\"``, ``\"mean\"``,\n ``\"median\"``, ``\"minimum\"``, ``\"reflect\"``, ``\"symmetric\"``, ``\"wrap\"``, ``\"empty\"``}\n One of the listed string values or a user supplied function. Defaults to ``\"constant\"``.\n See also: https://numpy.org/doc/1.18/reference/generated/numpy.pad.html\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.mode = ensure_tuple_rep(mode, len(self.keys))\n self.padder = BorderPad(spatial_border=spatial_border)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key, m in self.key_iterator(d, self.mode):\n self.push_transform(d, key, extra_info={\"mode\": m.value if isinstance(m, Enum) else m})\n d[key] = self.padder(d[key], mode=m)\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_DivisiblePadd_DivisiblePadd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_DivisiblePadd_DivisiblePadd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 226, "end_line": 265, "span_ids": ["DivisiblePadd.__init__", "DivisiblePadd", "DivisiblePadd.__call__"], "tokens": 466}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DivisiblePadd(MapTransform, InvertibleTransform):\n \"\"\"\n Pad the input data, so that the spatial sizes are divisible by `k`.\n Dictionary-based wrapper of :py:class:`monai.transforms.DivisiblePad`.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n k: Union[Sequence[int], int],\n mode: NumpyPadModeSequence = NumpyPadMode.CONSTANT,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n k: the target k for each spatial dimension.\n if `k` is negative or 0, the original size is preserved.\n if `k` is an int, the same `k` be applied to all 
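The three accepted `spatial_border` forms described in the `BorderPadd` docstring can be checked directly; the expected shapes below follow the docstring's own examples (assuming `BorderPadd` is importable from `monai.transforms` and using a hypothetical `img` key):

```python
# Sketch of the three spatial_border forms; shapes mirror the docstring examples.
import numpy as np
from monai.transforms import BorderPadd

data = {"img": np.zeros((1, 4, 4), dtype=np.float32)}  # channel-first (C, H, W)

# single int: every border of every spatial dim padded by 2 -> (1, 8, 8)
print(BorderPadd(keys="img", spatial_border=2)(data)["img"].shape)

# one value per spatial dim: H borders padded by 2, W borders by 1 -> (1, 8, 6)
print(BorderPadd(keys="img", spatial_border=[2, 1])(data)["img"].shape)

# two values per spatial dim: H padded 1 (top) + 2 (bottom), W padded 3 + 4 -> (1, 7, 11)
print(BorderPadd(keys="img", spatial_border=[1, 2, 3, 4])(data)["img"].shape)
```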
the input spatial dimensions.\n mode: {``\"constant\"``, ``\"edge\"``, ``\"linear_ramp\"``, ``\"maximum\"``, ``\"mean\"``,\n ``\"median\"``, ``\"minimum\"``, ``\"reflect\"``, ``\"symmetric\"``, ``\"wrap\"``, ``\"empty\"``}\n One of the listed string values or a user supplied function. Defaults to ``\"constant\"``.\n See also: https://numpy.org/doc/1.18/reference/generated/numpy.pad.html\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n allow_missing_keys: don't raise exception if key is missing.\n\n See also :py:class:`monai.transforms.SpatialPad`\n\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.mode = ensure_tuple_rep(mode, len(self.keys))\n self.padder = DivisiblePad(k=k)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key, m in self.key_iterator(d, self.mode):\n self.push_transform(d, key, extra_info={\"mode\": m.value if isinstance(m, Enum) else m})\n d[key] = self.padder(d[key], mode=m)\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_SpatialCropd_SpatialCropd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_SpatialCropd_SpatialCropd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 286, "end_line": 327, "span_ids": ["SpatialCropd", "SpatialCropd.__call__", "SpatialCropd.__init__"], "tokens": 413}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SpatialCropd(MapTransform, InvertibleTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.SpatialCrop`.\n General purpose cropper to produce sub-volume region of interest (ROI).\n It can support to crop ND spatial (channel-first) data.\n\n The cropped region can be parameterised in various ways:\n - a list of slices for each spatial dimension (allows for use of -ve indexing and `None`)\n - a spatial center and size\n - the start and end coordinates of the ROI\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n roi_center: Optional[Sequence[int]] = None,\n roi_size: Optional[Sequence[int]] = None,\n roi_start: Optional[Sequence[int]] = None,\n roi_end: Optional[Sequence[int]] = None,\n roi_slices: Optional[Sequence[slice]] = None,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n roi_center: voxel coordinates for center of the crop ROI.\n roi_size: size of the crop ROI.\n roi_start: voxel coordinates for start of the crop ROI.\n roi_end: voxel coordinates for end of the crop ROI.\n roi_slices: list of slices for each of the spatial dimensions.\n allow_missing_keys: don't raise exception if key is 
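`DivisiblePadd` pads each spatial dimension up to the next multiple of `k`. A small sketch under the same import assumption, with illustrative shapes:

```python
# Minimal sketch, assuming DivisiblePadd is importable from monai.transforms.
import numpy as np
from monai.transforms import DivisiblePadd

data = {"image": np.zeros((1, 30, 47), dtype=np.float32)}

# Pad each spatial dimension up to the next multiple of 16.
pad = DivisiblePadd(keys="image", k=16)
print(pad(data)["image"].shape)  # expected: (1, 32, 48)
```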
missing.\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.cropper = SpatialCrop(roi_center, roi_size, roi_start, roi_end, roi_slices)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n self.push_transform(d, key)\n d[key] = self.cropper(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_CenterSpatialCropd_CenterSpatialCropd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_CenterSpatialCropd_CenterSpatialCropd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 351, "end_line": 375, "span_ids": ["CenterSpatialCropd.__call__", "CenterSpatialCropd", "CenterSpatialCropd.__init__"], "tokens": 252}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CenterSpatialCropd(MapTransform, InvertibleTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.CenterSpatialCrop`.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: monai.transforms.MapTransform\n roi_size: the size of the crop region e.g. 
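For `SpatialCropd`, one of the ROI parameterisations listed in its docstring (center plus size, start/end coordinates, or explicit slices) is sufficient. A hedged sketch using the center/size form, with a hypothetical `image` key:

```python
# Sketch only; assumes SpatialCropd is importable from monai.transforms.
import numpy as np
from monai.transforms import SpatialCropd

data = {"image": np.arange(64, dtype=np.float32).reshape(1, 8, 8)}

# Crop a 4x4 ROI centred at voxel (4, 4); only the spatial dims are cropped.
crop = SpatialCropd(keys="image", roi_center=(4, 4), roi_size=(4, 4))
print(crop(data)["image"].shape)  # expected: (1, 4, 4)
```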
[224,224,128]\n If its components have non-positive values, the corresponding size of input image will be used.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n\n def __init__(\n self, keys: KeysCollection, roi_size: Union[Sequence[int], int], allow_missing_keys: bool = False\n ) -> None:\n super().__init__(keys, allow_missing_keys)\n self.cropper = CenterSpatialCrop(roi_size)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n orig_size = d[key].shape[1:]\n d[key] = self.cropper(d[key])\n self.push_transform(d, key, orig_size=orig_size)\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandSpatialCropd_RandSpatialCropd.__init__.self._size.None": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandSpatialCropd_RandSpatialCropd.__init__.self._size.None", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 399, "end_line": 431, "span_ids": ["RandSpatialCropd", "RandSpatialCropd.__init__"], "tokens": 370}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandSpatialCropd(Randomizable, MapTransform, InvertibleTransform):\n \"\"\"\n Dictionary-based version :py:class:`monai.transforms.RandSpatialCrop`.\n Crop image with random size or specific size ROI. It can crop at a random position as\n center or at the image center. And allows to set the minimum size to limit the randomly\n generated ROI. Suppose all the expected fields specified by `keys` have same shape.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: monai.transforms.MapTransform\n roi_size: if `random_size` is True, it specifies the minimum crop region.\n if `random_size` is False, it specifies the expected ROI size to crop. e.g. 
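`CenterSpatialCropd` crops a centred ROI, and a non-positive `roi_size` component falls back to the input size for that dimension, as the docstring states. For example (shapes are illustrative):

```python
# Sketch only; assumes CenterSpatialCropd is importable from monai.transforms.
import numpy as np
from monai.transforms import CenterSpatialCropd

data = {"image": np.zeros((1, 64, 64, 40), dtype=np.float32)}

# Crop a centred ROI; the non-positive component (-1) keeps that input size.
crop = CenterSpatialCropd(keys="image", roi_size=(32, 32, -1))
print(crop(data)["image"].shape)  # expected: (1, 32, 32, 40)
```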
[224, 224, 128]\n If its components have non-positive values, the corresponding size of input image will be used.\n random_center: crop at random position as center or the image center.\n random_size: crop with random size or specific size ROI.\n The actual size is sampled from `randint(roi_size, img_size)`.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n roi_size: Union[Sequence[int], int],\n random_center: bool = True,\n random_size: bool = True,\n allow_missing_keys: bool = False,\n ) -> None:\n MapTransform.__init__(self, keys, allow_missing_keys)\n self.roi_size = roi_size\n self.random_center = random_center\n self.random_size = random_size\n self._slices: Optional[Tuple[slice, ...]] = None\n self._size: Optional[Sequence[int]] = None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandCropByPosNegLabeld.__call___RandCropByPosNegLabeld.__call__.return.results": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandCropByPosNegLabeld.__call___RandCropByPosNegLabeld.__call__.return.results", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 794, "end_line": 823, "span_ids": ["RandCropByPosNegLabeld.__call__"], "tokens": 359}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandCropByPosNegLabeld(Randomizable, MapTransform):\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> List[Dict[Hashable, np.ndarray]]:\n d = dict(data)\n label = d[self.label_key]\n image = d[self.image_key] if self.image_key else None\n fg_indices = d.get(self.fg_indices_key) if self.fg_indices_key is not None else None\n bg_indices = d.get(self.bg_indices_key) if self.bg_indices_key is not None else None\n\n self.randomize(label, fg_indices, bg_indices, image)\n if not isinstance(self.spatial_size, tuple):\n raise AssertionError\n if self.centers is None:\n raise AssertionError\n results: List[Dict[Hashable, np.ndarray]] = [{} for _ in range(self.num_samples)]\n\n for i, center in enumerate(self.centers):\n for key in self.key_iterator(d):\n img = d[key]\n cropper = SpatialCrop(roi_center=tuple(center), roi_size=self.spatial_size) # type: ignore\n results[i][key] = cropper(img)\n # fill in the extra keys with unmodified data\n for key in set(data.keys()).difference(set(self.keys)):\n results[i][key] = data[key]\n # add `patch_index` to the meta data\n for key in self.key_iterator(d):\n meta_data_key = f\"{key}_{self.meta_key_postfix}\"\n if meta_data_key not in results[i]:\n results[i][meta_data_key] = {} # type: ignore\n results[i][meta_data_key][Key.PATCH_INDEX] = i\n\n return results", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_SpatialPadD_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_SpatialPadD_", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 944, "end_line": 956, "span_ids": ["impl:5"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "SpatialPadD = SpatialPadDict = SpatialPadd\nBorderPadD = BorderPadDict = BorderPadd\nDivisiblePadD = DivisiblePadDict = DivisiblePadd\nSpatialCropD = SpatialCropDict = SpatialCropd\nCenterSpatialCropD = CenterSpatialCropDict = CenterSpatialCropd\nRandSpatialCropD = RandSpatialCropDict = RandSpatialCropd\nRandSpatialCropSamplesD = RandSpatialCropSamplesDict = RandSpatialCropSamplesd\nCropForegroundD = CropForegroundDict = CropForegroundd\nRandWeightedCropD = RandWeightedCropDict = RandWeightedCropd\nRandCropByPosNegLabelD = RandCropByPosNegLabelDict = RandCropByPosNegLabeld\nResizeWithPadOrCropD = ResizeWithPadOrCropDict = ResizeWithPadOrCropd\nBoundingRectD = BoundingRectDict = BoundingRectd", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/__init__.py__": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/__init__.py__", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 11, "end_line": 11, "span_ids": [], "tokens": 0}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandScaleIntensity_RandScaleIntensity.__call__.return.scaler_img_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandScaleIntensity_RandScaleIntensity.__call__.return.scaler_img_", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 278, 
"end_line": 313, "span_ids": ["RandScaleIntensity.__init__", "RandScaleIntensity", "RandScaleIntensity.randomize", "RandScaleIntensity.__call__"], "tokens": 330}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandScaleIntensity(RandomizableTransform):\n \"\"\"\n Randomly scale the intensity of input image by ``v = v * (1 + factor)`` where the `factor`\n is randomly picked.\n \"\"\"\n\n def __init__(self, factors: Union[Tuple[float, float], float], prob: float = 0.1) -> None:\n \"\"\"\n Args:\n factors: factor range to randomly scale by ``v = v * (1 + factor)``.\n if single number, factor value is picked from (-factors, factors).\n prob: probability of scale.\n\n \"\"\"\n RandomizableTransform.__init__(self, prob)\n if isinstance(factors, (int, float)):\n self.factors = (min(-factors, factors), max(-factors, factors))\n else:\n if len(factors) != 2:\n raise AssertionError(\"factors should be a number or pair of numbers.\")\n self.factors = (min(factors), max(factors))\n self.factor = self.factors[0]\n\n def randomize(self, data: Optional[Any] = None) -> None:\n self.factor = self.R.uniform(low=self.factors[0], high=self.factors[1])\n super().randomize(None)\n\n def __call__(self, img: np.ndarray) -> np.ndarray:\n \"\"\"\n Apply the transform to `img`.\n \"\"\"\n self.randomize()\n if not self._do_transform:\n return img\n scaler = ScaleIntensity(minv=None, maxv=None, factor=self.factor)\n return scaler(img)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRange_ScaleIntensityRange.__init__.self.clip.clip": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRange_ScaleIntensityRange.__init__.self.clip.clip", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 254, "end_line": 272, "span_ids": ["ScaleIntensityRange", "ScaleIntensityRange.__init__"], "tokens": 165}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ScaleIntensityRange(Transform):\n \"\"\"\n Apply specific intensity scaling to the whole numpy array.\n Scaling from [a_min, a_max] to [b_min, b_max] with clip option.\n\n Args:\n a_min: intensity original range min.\n a_max: intensity original range max.\n b_min: intensity target range min.\n b_max: intensity target range max.\n clip: whether to perform clip after scaling.\n \"\"\"\n\n def __init__(self, a_min: float, a_max: float, b_min: float, b_max: float, clip: bool = False) -> None:\n self.a_min = a_min\n self.a_max = a_max\n self.b_min = b_min\n self.b_max = b_max\n self.clip 
= clip", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRange.__call___ScaleIntensityRange.__call__.return.img": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRange.__call___ScaleIntensityRange.__call__.return.img", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 526, "end_line": 538, "span_ids": ["ScaleIntensityRange.__call__"], "tokens": 137}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ScaleIntensityRange(Transform):\n\n def __call__(self, img: np.ndarray):\n \"\"\"\n Apply the transform to `img`.\n \"\"\"\n if self.a_max - self.a_min == 0.0:\n warn(\"Divide by zero (a_min == a_max)\", Warning)\n return img - self.a_min + self.b_min\n\n img = (img - self.a_min) / (self.a_max - self.a_min)\n img = img * (self.b_max - self.b_min) + self.b_min\n if self.clip:\n img = np.asarray(np.clip(img, self.b_min, self.b_max))\n return img", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_AdjustContrast_AdjustContrast.__call__.return.np_power_img_img_min_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_AdjustContrast_AdjustContrast.__call__.return.np_power_img_img_min_", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 541, "end_line": 563, "span_ids": ["AdjustContrast.__init__", "AdjustContrast", "AdjustContrast.__call__"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AdjustContrast(Transform):\n \"\"\"\n Changes image intensity by gamma. 
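The `ScaleIntensityRange.__call__` shown above is a plain linear rescale from `[a_min, a_max]` to `[b_min, b_max]` with optional clipping; a worked numeric example (import path assumed):

```python
# Worked example of (img - a_min) / (a_max - a_min) * (b_max - b_min) + b_min, then clip.
import numpy as np
from monai.transforms import ScaleIntensityRange  # assumed import path

img = np.array([[-100.0, 0.0, 500.0, 1500.0]], dtype=np.float32)
scaler = ScaleIntensityRange(a_min=0.0, a_max=1000.0, b_min=0.0, b_max=1.0, clip=True)
print(scaler(img))  # expected approximately [0., 0., 0.5, 1.] after clipping
```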
Each pixel/voxel intensity is updated as::\n\n x = ((x - min) / intensity_range) ^ gamma * intensity_range + min\n\n Args:\n gamma: gamma value to adjust the contrast as function.\n \"\"\"\n\n def __init__(self, gamma: float) -> None:\n if not isinstance(gamma, (int, float)):\n raise AssertionError(\"gamma must be a float or int number.\")\n self.gamma = gamma\n\n def __call__(self, img: np.ndarray):\n \"\"\"\n Apply the transform to `img`.\n \"\"\"\n epsilon = 1e-7\n img_min = img.min()\n img_range = img.max() - img_min\n return np.power(((img - img_min) / float(img_range + epsilon)), self.gamma) * img_range + img_min", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandAdjustContrast_RandAdjustContrast.__call__.return.adjuster_img_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandAdjustContrast_RandAdjustContrast.__call__.return.adjuster_img_", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 566, "end_line": 608, "span_ids": ["RandAdjustContrast.randomize", "RandAdjustContrast", "RandAdjustContrast.__call__", "RandAdjustContrast.__init__"], "tokens": 377}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandAdjustContrast(RandomizableTransform):\n \"\"\"\n Randomly changes image intensity by gamma. 
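The gamma rule in `AdjustContrast.__call__` can be verified numerically; with `min=0`, `range=4`, and `gamma=2`, the value 2 maps to `(2/4)**2 * 4 = 1` (the `epsilon` term is negligible at this scale). Import path assumed:

```python
# Numeric check of x = ((x - min) / range) ** gamma * range + min.
import numpy as np
from monai.transforms import AdjustContrast  # assumed import path

img = np.array([[0.0, 2.0, 4.0]], dtype=np.float32)  # min=0, range=4
adjust = AdjustContrast(gamma=2.0)
print(adjust(img))  # approximately [0., 1., 4.]
```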
Each pixel/voxel intensity is updated as::\n\n x = ((x - min) / intensity_range) ^ gamma * intensity_range + min\n\n Args:\n prob: Probability of adjustment.\n gamma: Range of gamma values.\n If single number, value is picked from (0.5, gamma), default is (0.5, 4.5).\n \"\"\"\n\n def __init__(self, prob: float = 0.1, gamma: Union[Sequence[float], float] = (0.5, 4.5)) -> None:\n RandomizableTransform.__init__(self, prob)\n\n if isinstance(gamma, (int, float)):\n if gamma <= 0.5:\n raise AssertionError(\n \"if gamma is single number, must greater than 0.5 and value is picked from (0.5, gamma)\"\n )\n self.gamma = (0.5, gamma)\n else:\n if len(gamma) != 2:\n raise AssertionError(\"gamma should be a number or pair of numbers.\")\n self.gamma = (min(gamma), max(gamma))\n\n self.gamma_value = None\n\n def randomize(self, data: Optional[Any] = None) -> None:\n super().randomize(None)\n self.gamma_value = self.R.uniform(low=self.gamma[0], high=self.gamma[1])\n\n def __call__(self, img: np.ndarray) -> np.ndarray:\n \"\"\"\n Apply the transform to `img`.\n \"\"\"\n self.randomize()\n if self.gamma_value is None:\n raise AssertionError\n if not self._do_transform:\n return img\n adjuster = AdjustContrast(self.gamma_value)\n return adjuster(img)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRangePercentiles_ScaleIntensityRangePercentiles._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRangePercentiles_ScaleIntensityRangePercentiles._", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 354, "end_line": 407, "span_ids": ["ScaleIntensityRangePercentiles"], "tokens": 641}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ScaleIntensityRangePercentiles(Transform):\n \"\"\"\n Apply range scaling to a numpy array based on the intensity distribution of the input.\n\n By default this transform will scale from [lower_intensity_percentile, upper_intensity_percentile] to [b_min, b_max], where\n {lower,upper}_intensity_percentile are the intensity values at the corresponding percentiles of ``img``.\n\n The ``relative`` parameter can also be set to scale from [lower_intensity_percentile, upper_intensity_percentile] to the\n lower and upper percentiles of the output range [b_min, b_max]\n\n For example:\n\n .. 
code-block:: python\n :emphasize-lines: 11, 22\n\n image = np.array(\n [[[1, 2, 3, 4, 5],\n [1, 2, 3, 4, 5],\n [1, 2, 3, 4, 5],\n [1, 2, 3, 4, 5],\n [1, 2, 3, 4, 5],\n [1, 2, 3, 4, 5]]])\n\n # Scale from lower and upper image intensity percentiles\n # to output range [b_min, b_max]\n scaler = ScaleIntensityRangePercentiles(10, 90, 0, 200, False, False)\n print(scaler(image))\n [[[0., 50., 100., 150., 200.],\n [0., 50., 100., 150., 200.],\n [0., 50., 100., 150., 200.],\n [0., 50., 100., 150., 200.],\n [0., 50., 100., 150., 200.],\n [0., 50., 100., 150., 200.]]]\n\n # Scale from lower and upper image intensity percentiles\n # to lower and upper percentiles of the output range [b_min, b_max]\n rel_scaler = ScaleIntensityRangePercentiles(10, 90, 0, 200, False, True)\n print(rel_scaler(image))\n [[[20., 60., 100., 140., 180.],\n [20., 60., 100., 140., 180.],\n [20., 60., 100., 140., 180.],\n [20., 60., 100., 140., 180.],\n [20., 60., 100., 140., 180.],\n [20., 60., 100., 140., 180.]]]\n\n\n Args:\n lower: lower intensity percentile.\n upper: upper intensity percentile.\n b_min: intensity target range min.\n b_max: intensity target range max.\n clip: whether to perform clip after scaling.\n relative: whether to scale to the corresponding percentiles of [b_min, b_max].\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRangePercentiles.__init___ScaleIntensityRangePercentiles.__init__.self.relative.relative": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRangePercentiles.__init___ScaleIntensityRangePercentiles.__init__.self.relative.relative", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 666, "end_line": 678, "span_ids": ["ScaleIntensityRangePercentiles.__init__"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ScaleIntensityRangePercentiles(Transform):\n\n def __init__(\n self, lower: float, upper: float, b_min: float, b_max: float, clip: bool = False, relative: bool = False\n ) -> None:\n if lower < 0.0 or lower > 100.0:\n raise AssertionError(\"Percentiles must be in the range [0, 100]\")\n if upper < 0.0 or upper > 100.0:\n raise AssertionError(\"Percentiles must be in the range [0, 100]\")\n self.lower = lower\n self.upper = upper\n self.b_min = b_min\n self.b_max = b_max\n self.clip = clip\n self.relative = relative", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRangePercentiles.__call___ScaleIntensityRangePercentiles.__call__.return.img": 
{"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensityRangePercentiles.__call___ScaleIntensityRangePercentiles.__call__.return.img", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 680, "end_line": 699, "span_ids": ["ScaleIntensityRangePercentiles.__call__"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ScaleIntensityRangePercentiles(Transform):\n\n def __call__(self, img: np.ndarray):\n \"\"\"\n Apply the transform to `img`.\n \"\"\"\n a_min = np.percentile(img, self.lower)\n a_max = np.percentile(img, self.upper)\n b_min = self.b_min\n b_max = self.b_max\n\n if self.relative:\n b_min = ((self.b_max - self.b_min) * (self.lower / 100.0)) + self.b_min\n b_max = ((self.b_max - self.b_min) * (self.upper / 100.0)) + self.b_min\n\n scalar = ScaleIntensityRange(a_min=a_min, a_max=a_max, b_min=b_min, b_max=b_max, clip=False)\n img = scalar(img)\n\n if self.clip:\n img = np.asarray(np.clip(img, self.b_min, self.b_max))\n\n return img", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ShiftIntensityd_ShiftIntensityd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ShiftIntensityd_ShiftIntensityd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 155, "end_line": 175, "span_ids": ["ShiftIntensityd", "ShiftIntensityd.__call__", "ShiftIntensityd.__init__"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ShiftIntensityd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.ShiftIntensity`.\n \"\"\"\n\n def __init__(self, keys: KeysCollection, offset: float, allow_missing_keys: bool = False) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n offset: offset value to shift the intensity of image.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.shifter = ShiftIntensity(offset)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.shifter(d[key])\n return d", "start_char_idx": null, "end_char_idx": 
null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandShiftIntensityd_RandShiftIntensityd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandShiftIntensityd_RandShiftIntensityd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 178, "end_line": 223, "span_ids": ["RandShiftIntensityd.randomize", "RandShiftIntensityd.__init__", "RandShiftIntensityd.__call__", "RandShiftIntensityd"], "tokens": 417}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandShiftIntensityd(RandomizableTransform, MapTransform):\n \"\"\"\n Dictionary-based version :py:class:`monai.transforms.RandShiftIntensity`.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n offsets: Union[Tuple[float, float], float],\n prob: float = 0.1,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n offsets: offset range to randomly shift.\n if single number, offset value is picked from (-offsets, offsets).\n prob: probability of rotating.\n (Default 0.1, with 10% probability it returns a rotated array.)\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n MapTransform.__init__(self, keys, allow_missing_keys)\n RandomizableTransform.__init__(self, prob)\n\n if isinstance(offsets, (int, float)):\n self.offsets = (min(-offsets, offsets), max(-offsets, offsets))\n else:\n if len(offsets) != 2:\n raise AssertionError(\"offsets should be a number or pair of numbers.\")\n self.offsets = (min(offsets), max(offsets))\n self._offset = self.offsets[0]\n\n def randomize(self, data: Optional[Any] = None) -> None:\n self._offset = self.R.uniform(low=self.offsets[0], high=self.offsets[1])\n super().randomize(None)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n self.randomize()\n if not self._do_transform:\n return d\n shifter = ShiftIntensity(self._offset)\n for key in self.key_iterator(d):\n d[key] = shifter(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ScaleIntensityd_ScaleIntensityd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ScaleIntensityd_ScaleIntensityd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": 
"dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 317, "end_line": 350, "span_ids": ["ScaleIntensityd", "ScaleIntensityd.__call__", "ScaleIntensityd.__init__"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ScaleIntensityd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.ScaleIntensity`.\n Scale the intensity of input image to the given value range (minv, maxv).\n If `minv` and `maxv` not provided, use `factor` to scale image by ``v = v * (1 + factor)``.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n minv: Optional[float] = 0.0,\n maxv: Optional[float] = 1.0,\n factor: Optional[float] = None,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n minv: minimum value of output data.\n maxv: maximum value of output data.\n factor: factor scale by ``v = v * (1 + factor)``. In order to use\n this parameter, please set `minv` and `maxv` into None.\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.scaler = ScaleIntensity(minv, maxv, factor)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.scaler(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandScaleIntensityd_RandScaleIntensityd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandScaleIntensityd_RandScaleIntensityd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 353, "end_line": 399, "span_ids": ["RandScaleIntensityd.__call__", "RandScaleIntensityd.__init__", "RandScaleIntensityd", "RandScaleIntensityd.randomize"], "tokens": 433}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandScaleIntensityd(RandomizableTransform, MapTransform):\n \"\"\"\n Dictionary-based version :py:class:`monai.transforms.RandScaleIntensity`.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n factors: Union[Tuple[float, float], float],\n prob: float = 0.1,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n factors: factor range to randomly scale 
by ``v = v * (1 + factor)``.\n if single number, factor value is picked from (-factors, factors).\n prob: probability of rotating.\n (Default 0.1, with 10% probability it returns a rotated array.)\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n MapTransform.__init__(self, keys, allow_missing_keys)\n RandomizableTransform.__init__(self, prob)\n\n if isinstance(factors, (int, float)):\n self.factors = (min(-factors, factors), max(-factors, factors))\n else:\n if len(factors) != 2:\n raise AssertionError(\"factors should be a number or pair of numbers.\")\n self.factors = (min(factors), max(factors))\n self.factor = self.factors[0]\n\n def randomize(self, data: Optional[Any] = None) -> None:\n self.factor = self.R.uniform(low=self.factors[0], high=self.factors[1])\n super().randomize(None)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n self.randomize()\n if not self._do_transform:\n return d\n scaler = ScaleIntensity(minv=None, maxv=None, factor=self.factor)\n for key in self.key_iterator(d):\n d[key] = scaler(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_NormalizeIntensityd_NormalizeIntensityd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_NormalizeIntensityd_NormalizeIntensityd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 446, "end_line": 481, "span_ids": ["NormalizeIntensityd.__call__", "NormalizeIntensityd.__init__", "NormalizeIntensityd"], "tokens": 336}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class NormalizeIntensityd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.NormalizeIntensity`.\n This transform can normalize only non-zero values or entire image, and can also calculate\n mean and std on each channel separately.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: monai.transforms.MapTransform\n subtrahend: the amount to subtract by (usually the mean)\n divisor: the amount to divide by (usually the standard deviation)\n nonzero: whether only normalize non-zero values.\n channel_wise: if using calculated mean and std, calculate on each channel separately\n or calculate on the entire image directly.\n dtype: output data type, defaults to float32.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n subtrahend: Optional[np.ndarray] = None,\n divisor: Optional[np.ndarray] = None,\n nonzero: bool = False,\n channel_wise: bool = False,\n dtype: DtypeLike = np.float32,\n allow_missing_keys: bool = False,\n ) -> None:\n super().__init__(keys, allow_missing_keys)\n self.normalizer = 
NormalizeIntensity(subtrahend, divisor, nonzero, channel_wise, dtype)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.normalizer(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ThresholdIntensityd_ThresholdIntensityd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ThresholdIntensityd_ThresholdIntensityd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 484, "end_line": 512, "span_ids": ["ThresholdIntensityd.__init__", "ThresholdIntensityd", "ThresholdIntensityd.__call__"], "tokens": 242}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ThresholdIntensityd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.ThresholdIntensity`.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: monai.transforms.MapTransform\n threshold: the threshold to filter intensity values.\n above: filter values above the threshold or below the threshold, default is True.\n cval: value to fill the remaining parts of the image, default is 0.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n threshold: float,\n above: bool = True,\n cval: float = 0.0,\n allow_missing_keys: bool = False,\n ) -> None:\n super().__init__(keys, allow_missing_keys)\n self.filter = ThresholdIntensity(threshold, above, cval)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.filter(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ScaleIntensityRanged_ScaleIntensityRanged.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ScaleIntensityRanged_ScaleIntensityRanged.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 515, "end_line": 547, "span_ids": ["ScaleIntensityRanged.__call__", "ScaleIntensityRanged.__init__", "ScaleIntensityRanged"], "tokens": 260}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
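`ThresholdIntensityd` keeps the voxels that pass the threshold test and writes `cval` everywhere else. A short sketch; the expected output assumes the usual `img > threshold` comparison for `above=True`, and the key name is illustrative:

```python
# Sketch of ThresholdIntensityd with above=True: values above 0.5 kept, others set to cval.
import numpy as np
from monai.transforms import ThresholdIntensityd

data = {"image": np.array([[0.2, 0.6, 1.4]], dtype=np.float32)}
thresh = ThresholdIntensityd(keys="image", threshold=0.5, above=True, cval=0.0)
print(thresh(data)["image"])  # expected: [[0., 0.6, 1.4]]
```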
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ScaleIntensityRanged(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.ScaleIntensityRange`.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: monai.transforms.MapTransform\n a_min: intensity original range min.\n a_max: intensity original range max.\n b_min: intensity target range min.\n b_max: intensity target range max.\n clip: whether to perform clip after scaling.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n a_min: float,\n a_max: float,\n b_min: float,\n b_max: float,\n clip: bool = False,\n allow_missing_keys: bool = False,\n ) -> None:\n super().__init__(keys, allow_missing_keys)\n self.scaler = ScaleIntensityRange(a_min, a_max, b_min, b_max, clip)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.scaler(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_AdjustContrastd_AdjustContrastd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_AdjustContrastd_AdjustContrastd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 550, "end_line": 572, "span_ids": ["AdjustContrastd", "AdjustContrastd.__call__", "AdjustContrastd.__init__"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AdjustContrastd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.AdjustContrast`.\n Changes image intensity by gamma. 
Each pixel/voxel intensity is updated as:\n\n `x = ((x - min) / intensity_range) ^ gamma * intensity_range + min`\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: monai.transforms.MapTransform\n gamma: gamma value to adjust the contrast as function.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n\n def __init__(self, keys: KeysCollection, gamma: float, allow_missing_keys: bool = False) -> None:\n super().__init__(keys, allow_missing_keys)\n self.adjuster = AdjustContrast(gamma)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.adjuster(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandAdjustContrastd_RandAdjustContrastd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandAdjustContrastd_RandAdjustContrastd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 575, "end_line": 628, "span_ids": ["RandAdjustContrastd", "RandAdjustContrastd.randomize", "RandAdjustContrastd.__call__", "RandAdjustContrastd.__init__"], "tokens": 495}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandAdjustContrastd(RandomizableTransform, MapTransform):\n \"\"\"\n Dictionary-based version :py:class:`monai.transforms.RandAdjustContrast`.\n Randomly changes image intensity by gamma. 
Each pixel/voxel intensity is updated as:\n\n `x = ((x - min) / intensity_range) ^ gamma * intensity_range + min`\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: monai.transforms.MapTransform\n prob: Probability of adjustment.\n gamma: Range of gamma values.\n If single number, value is picked from (0.5, gamma), default is (0.5, 4.5).\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n prob: float = 0.1,\n gamma: Union[Tuple[float, float], float] = (0.5, 4.5),\n allow_missing_keys: bool = False,\n ) -> None:\n MapTransform.__init__(self, keys, allow_missing_keys)\n RandomizableTransform.__init__(self, prob)\n\n if isinstance(gamma, (int, float)):\n if gamma <= 0.5:\n raise AssertionError(\n \"if gamma is single number, must greater than 0.5 and value is picked from (0.5, gamma)\"\n )\n self.gamma = (0.5, gamma)\n else:\n if len(gamma) != 2:\n raise AssertionError(\"gamma should be a number or pair of numbers.\")\n self.gamma = (min(gamma), max(gamma))\n\n self.gamma_value: Optional[float] = None\n\n def randomize(self, data: Optional[Any] = None) -> None:\n super().randomize(None)\n self.gamma_value = self.R.uniform(low=self.gamma[0], high=self.gamma[1])\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n self.randomize()\n if self.gamma_value is None:\n raise AssertionError\n if not self._do_transform:\n return d\n adjuster = AdjustContrast(self.gamma_value)\n for key in self.key_iterator(d):\n d[key] = adjuster(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ScaleIntensityRangePercentilesd_ScaleIntensityRangePercentilesd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_ScaleIntensityRangePercentilesd_ScaleIntensityRangePercentilesd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 631, "end_line": 665, "span_ids": ["ScaleIntensityRangePercentilesd", "ScaleIntensityRangePercentilesd.__init__", "ScaleIntensityRangePercentilesd.__call__"], "tokens": 284}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ScaleIntensityRangePercentilesd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.ScaleIntensityRangePercentiles`.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: monai.transforms.MapTransform\n lower: lower percentile.\n upper: upper percentile.\n b_min: intensity target range min.\n b_max: intensity target range max.\n clip: whether to perform clip after scaling.\n relative: whether to scale to the corresponding percentiles of [b_min, b_max]\n allow_missing_keys: don't raise exception if key is 
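`RandAdjustContrastd` draws a gamma from the configured range on each call; because the rule maps `min` to `min` and, up to the epsilon, `max` to `max`, the intensity extremes are approximately preserved. A reproducible sketch with assumed import path and illustrative values:

```python
# Sketch of RandAdjustContrastd; prob=1.0 applies the gamma adjustment on every call.
import numpy as np
from monai.transforms import RandAdjustContrastd

data = {"image": np.linspace(0, 1, 16, dtype=np.float32).reshape(1, 4, 4)}

rand_gamma = RandAdjustContrastd(keys="image", prob=1.0, gamma=(0.7, 3.0))
rand_gamma.set_random_state(seed=42)  # reproducible gamma draw
out = rand_gamma(data)
print(out["image"].min(), out["image"].max())  # extremes approximately unchanged
```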
missing.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n lower: float,\n upper: float,\n b_min: float,\n b_max: float,\n clip: bool = False,\n relative: bool = False,\n allow_missing_keys: bool = False,\n ) -> None:\n super().__init__(keys, allow_missing_keys)\n self.scaler = ScaleIntensityRangePercentiles(lower, upper, b_min, b_max, clip, relative)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.scaler(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_MaskIntensityd_MaskIntensityd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_MaskIntensityd_MaskIntensityd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 668, "end_line": 701, "span_ids": ["MaskIntensityd", "MaskIntensityd.__call__", "MaskIntensityd.__init__"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MaskIntensityd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.MaskIntensity`.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n mask_data: if mask data is single channel, apply to every channel\n of input image. if multiple channels, the channel number must\n match input data. 
mask_data will be converted to `bool` values\n by `mask_data > 0` before applying transform to input image.\n if None, will extract the mask data from input data based on `mask_key`.\n mask_key: the key to extract mask data from input dictionary, only works\n when `mask_data` is None.\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n mask_data: Optional[np.ndarray] = None,\n mask_key: Optional[str] = None,\n allow_missing_keys: bool = False,\n ) -> None:\n super().__init__(keys, allow_missing_keys)\n self.converter = MaskIntensity(mask_data)\n self.mask_key = mask_key if mask_data is None else None\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.converter(d[key], d[self.mask_key]) if self.mask_key is not None else self.converter(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianNoiseD_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianNoiseD_", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 960, "end_line": 980, "span_ids": ["impl:3"], "tokens": 307}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "RandGaussianNoiseD = RandGaussianNoiseDict = RandGaussianNoised\nShiftIntensityD = ShiftIntensityDict = ShiftIntensityd\nRandShiftIntensityD = RandShiftIntensityDict = RandShiftIntensityd\nStdShiftIntensityD = StdShiftIntensityDict = StdShiftIntensityd\nRandStdShiftIntensityD = RandStdShiftIntensityDict = RandStdShiftIntensityd\nRandBiasFieldD = RandBiasFieldDict = RandBiasFieldd\nScaleIntensityD = ScaleIntensityDict = ScaleIntensityd\nRandScaleIntensityD = RandScaleIntensityDict = RandScaleIntensityd\nNormalizeIntensityD = NormalizeIntensityDict = NormalizeIntensityd\nThresholdIntensityD = ThresholdIntensityDict = ThresholdIntensityd\nScaleIntensityRangeD = ScaleIntensityRangeDict = ScaleIntensityRanged\nAdjustContrastD = AdjustContrastDict = AdjustContrastd\nRandAdjustContrastD = RandAdjustContrastDict = RandAdjustContrastd\nScaleIntensityRangePercentilesD = ScaleIntensityRangePercentilesDict = ScaleIntensityRangePercentilesd\nMaskIntensityD = MaskIntensityDict = MaskIntensityd\nGaussianSmoothD = GaussianSmoothDict = GaussianSmoothd\nRandGaussianSmoothD = RandGaussianSmoothDict = RandGaussianSmoothd\nGaussianSharpenD = GaussianSharpenDict = GaussianSharpend\nRandGaussianSharpenD = RandGaussianSharpenDict = RandGaussianSharpend\nRandHistogramShiftD = RandHistogramShiftDict = RandHistogramShiftd", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", 
"class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/__init__.py__": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/__init__.py__", "embedding": null, "metadata": {"file_path": "monai/transforms/io/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 11, "end_line": 11, "span_ids": [], "tokens": 0}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/__init__.py__": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/__init__.py__", "embedding": null, "metadata": {"file_path": "monai/transforms/post/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 11, "end_line": 11, "span_ids": [], "tokens": 0}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_Activations_Activations.__init__.self.other.other": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_Activations_Activations.__init__.self.other.other", "embedding": null, "metadata": {"file_path": "monai/transforms/post/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 70, "end_line": 92, "span_ids": ["Activations.__init__", "Activations"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Activations(Transform):\n \"\"\"\n Add activation operations to the model output, typically `Sigmoid` or `Softmax`.\n\n Args:\n sigmoid: whether to execute sigmoid function on model output before transform.\n Defaults to ``False``.\n softmax: whether to execute softmax function on model output before transform.\n Defaults to ``False``.\n other: callable function to execute other activation layers, for example:\n `other = lambda 
x: torch.tanh(x)`. Defaults to ``None``.\n\n Raises:\n TypeError: When ``other`` is not an ``Optional[Callable]``.\n\n \"\"\"\n\n def __init__(self, sigmoid: bool = False, softmax: bool = False, other: Optional[Callable] = None) -> None:\n self.sigmoid = sigmoid\n self.softmax = softmax\n if other is not None and not callable(other):\n raise TypeError(f\"other must be None or callable but is {type(other).__name__}.\")\n self.other = other", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_Activations.__call___Activations.__call__.return.img": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_Activations.__call___Activations.__call__.return.img", "embedding": null, "metadata": {"file_path": "monai/transforms/post/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 63, "end_line": 104, "span_ids": ["Activations.__call__"], "tokens": 358}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Activations(Transform):\n\n def __call__(\n self,\n img: torch.Tensor,\n sigmoid: Optional[bool] = None,\n softmax: Optional[bool] = None,\n other: Optional[Callable] = None,\n ) -> torch.Tensor:\n \"\"\"\n Args:\n sigmoid: whether to execute sigmoid function on model output before transform.\n Defaults to ``self.sigmoid``.\n softmax: whether to execute softmax function on model output before transform.\n Defaults to ``self.softmax``.\n other: callable function to execute other activation layers, for example:\n `other = lambda x: torch.tanh(x)`. Defaults to ``self.other``.\n\n Raises:\n ValueError: When ``sigmoid=True`` and ``softmax=True``. Incompatible values.\n TypeError: When ``other`` is not an ``Optional[Callable]``.\n ValueError: When ``self.other=None`` and ``other=None``. 
Incompatible values.\n\n \"\"\"\n if sigmoid and softmax:\n raise ValueError(\"Incompatible values: sigmoid=True and softmax=True.\")\n if other is not None and not callable(other):\n raise TypeError(f\"other must be None or callable but is {type(other).__name__}.\")\n\n # convert to float as activation must operate on float tensor\n img = img.float()\n if sigmoid or self.sigmoid:\n img = torch.sigmoid(img)\n if softmax or self.softmax:\n # add channel dim if not existing\n if img.ndimension() == 1:\n img = img.unsqueeze(-1)\n img = torch.softmax(img, dim=1)\n\n act_func = self.other if other is None else other\n if act_func is not None:\n img = act_func(img)\n\n return img", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_AsDiscrete_AsDiscrete.__init__.self.logit_thresh.logit_thresh": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_AsDiscrete_AsDiscrete.__init__.self.logit_thresh.logit_thresh", "embedding": null, "metadata": {"file_path": "monai/transforms/post/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 121, "end_line": 156, "span_ids": ["AsDiscrete.__init__", "AsDiscrete"], "tokens": 293}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AsDiscrete(Transform):\n \"\"\"\n Execute after model forward to transform model output to discrete values.\n It can complete below operations:\n\n - execute `argmax` for input logits values.\n - threshold input value to 0.0 or 1.0.\n - convert input value to One-Hot format\n\n Args:\n argmax: whether to execute argmax function on input data before transform.\n Defaults to ``False``.\n to_onehot: whether to convert input data into the one-hot format.\n Defaults to ``False``.\n n_classes: the number of classes to convert to One-Hot format.\n Defaults to ``None``.\n threshold_values: whether threshold the float value to int number 0 or 1.\n Defaults to ``False``.\n logit_thresh: the threshold value for thresholding operation..\n Defaults to ``0.5``.\n\n \"\"\"\n\n def __init__(\n self,\n argmax: bool = False,\n to_onehot: bool = False,\n n_classes: Optional[int] = None,\n threshold_values: bool = False,\n logit_thresh: float = 0.5,\n ) -> None:\n self.argmax = argmax\n self.to_onehot = to_onehot\n self.n_classes = n_classes\n self.threshold_values = threshold_values\n self.logit_thresh = logit_thresh", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_AsDiscrete.__call___AsDiscrete.__call__.return.img_float_": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_AsDiscrete.__call___AsDiscrete.__call__.return.img_float_", "embedding": null, "metadata": {"file_path": "monai/transforms/post/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 144, "end_line": 179, "span_ids": ["AsDiscrete.__call__"], "tokens": 337}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AsDiscrete(Transform):\n\n def __call__(\n self,\n img: torch.Tensor,\n argmax: Optional[bool] = None,\n to_onehot: Optional[bool] = None,\n n_classes: Optional[int] = None,\n threshold_values: Optional[bool] = None,\n logit_thresh: Optional[float] = None,\n ) -> torch.Tensor:\n \"\"\"\n Args:\n argmax: whether to execute argmax function on input data before transform.\n Defaults to ``self.argmax``.\n to_onehot: whether to convert input data into the one-hot format.\n Defaults to ``self.to_onehot``.\n n_classes: the number of classes to convert to One-Hot format.\n Defaults to ``self.n_classes``.\n threshold_values: whether threshold the float value to int number 0 or 1.\n Defaults to ``self.threshold_values``.\n logit_thresh: the threshold value for thresholding operation..\n Defaults to ``self.logit_thresh``.\n\n \"\"\"\n if argmax or self.argmax:\n img = torch.argmax(img, dim=1, keepdim=True)\n\n if to_onehot or self.to_onehot:\n _nclasses = self.n_classes if n_classes is None else n_classes\n if not isinstance(_nclasses, int):\n raise AssertionError(\"One of self.n_classes or n_classes must be an integer\")\n img = one_hot(img, _nclasses)\n\n if threshold_values or self.threshold_values:\n img = img >= (self.logit_thresh if logit_thresh is None else logit_thresh)\n\n return img.float()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_KeepLargestConnectedComponent_KeepLargestConnectedComponent._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_KeepLargestConnectedComponent_KeepLargestConnectedComponent._", "embedding": null, "metadata": {"file_path": "monai/transforms/post/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 195, "end_line": 239, "span_ids": ["KeepLargestConnectedComponent"], "tokens": 782}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class KeepLargestConnectedComponent(Transform):\n \"\"\"\n Keeps only the largest connected component in the image.\n This transform can be used as a post-processing step to clean up over-segment areas in model output.\n\n The input is assumed to be a PyTorch Tensor:\n 1) 
With shape (batch_size, 1, spatial_dim1[, spatial_dim2, ...]) and the values correspond to expected labels.\n 2) With shape (batch_size, C, spatial_dim1[, spatial_dim2, ...]) and the values should be 0, 1 on each labels.\n\n Note:\n For single channel data, 0 will be treated as background and the over-segment pixels will be set to 0.\n For one-hot data, the over-segment pixels will be set to 0 in its channel.\n\n For example:\n Use KeepLargestConnectedComponent with applied_labels=[1], connectivity=1::\n\n [1, 0, 0] [0, 0, 0]\n [0, 1, 1] => [0, 1 ,1]\n [0, 1, 1] [0, 1, 1]\n\n Use KeepLargestConnectedComponent with applied_labels[1, 2], independent=False, connectivity=1::\n\n [0, 0, 1, 0 ,0] [0, 0, 1, 0 ,0]\n [0, 2, 1, 1 ,1] [0, 2, 1, 1 ,1]\n [1, 2, 1, 0 ,0] => [1, 2, 1, 0 ,0]\n [1, 2, 0, 1 ,0] [1, 2, 0, 0 ,0]\n [2, 2, 0, 0 ,2] [2, 2, 0, 0 ,0]\n\n Use KeepLargestConnectedComponent with applied_labels[1, 2], independent=True, connectivity=1::\n\n [0, 0, 1, 0 ,0] [0, 0, 1, 0 ,0]\n [0, 2, 1, 1 ,1] [0, 2, 1, 1 ,1]\n [1, 2, 1, 0 ,0] => [0, 2, 1, 0 ,0]\n [1, 2, 0, 1 ,0] [0, 2, 0, 0 ,0]\n [2, 2, 0, 0 ,2] [2, 2, 0, 0 ,0]\n\n Use KeepLargestConnectedComponent with applied_labels[1, 2], independent=False, connectivity=2::\n\n [0, 0, 1, 0 ,0] [0, 0, 1, 0 ,0]\n [0, 2, 1, 1 ,1] [0, 2, 1, 1 ,1]\n [1, 2, 1, 0 ,0] => [1, 2, 1, 0 ,0]\n [1, 2, 0, 1 ,0] [1, 2, 0, 1 ,0]\n [2, 2, 0, 0 ,2] [2, 2, 0, 0 ,2]\n\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_KeepLargestConnectedComponent.__init___KeepLargestConnectedComponent.__init__.self.connectivity.connectivity": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_KeepLargestConnectedComponent.__init___KeepLargestConnectedComponent.__init__.self.connectivity.connectivity", "embedding": null, "metadata": {"file_path": "monai/transforms/post/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 241, "end_line": 259, "span_ids": ["KeepLargestConnectedComponent.__init__"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class KeepLargestConnectedComponent(Transform):\n\n def __init__(\n self, applied_labels: Union[Sequence[int], int], independent: bool = True, connectivity: Optional[int] = None\n ) -> None:\n \"\"\"\n Args:\n applied_labels: Labels for applying the connected component on.\n If only one channel. The pixel whose value is not in this list will remain unchanged.\n If the data is in one-hot format, this is used to determine what channels to apply.\n independent: consider several labels as a whole or independent, default is `True`.\n Example use case would be segment label 1 is liver and label 2 is liver tumor, in that case\n you want this \"independent\" to be specified as False.\n connectivity: Maximum number of orthogonal hops to consider a pixel/voxel as a neighbor.\n Accepted values are ranging from 1 to input.ndim. 
If ``None``, a full\n connectivity of ``input.ndim`` is used.\n \"\"\"\n super().__init__()\n self.applied_labels = ensure_tuple(applied_labels)\n self.independent = independent\n self.connectivity = connectivity", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_KeepLargestConnectedComponent.__call___KeepLargestConnectedComponent.__call__.return.output": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_KeepLargestConnectedComponent.__call___KeepLargestConnectedComponent.__call__.return.output", "embedding": null, "metadata": {"file_path": "monai/transforms/post/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 273, "end_line": 315, "span_ids": ["KeepLargestConnectedComponent.__call__"], "tokens": 446}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class KeepLargestConnectedComponent(Transform):\n\n def __call__(self, img: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n img: shape must be (batch_size, C, spatial_dim1[, spatial_dim2, ...]).\n\n Returns:\n A PyTorch Tensor with shape (batch_size, C, spatial_dim1[, spatial_dim2, ...]).\n \"\"\"\n channel_dim = 1\n if img.shape[channel_dim] == 1:\n\n img = torch.squeeze(img, dim=channel_dim)\n\n if self.independent:\n for i in self.applied_labels:\n foreground = (img == i).type(torch.uint8)\n mask = get_largest_connected_component_mask(foreground, self.connectivity)\n img[foreground != mask] = 0\n else:\n foreground = torch.zeros_like(img)\n for i in self.applied_labels:\n foreground += (img == i).type(torch.uint8)\n mask = get_largest_connected_component_mask(foreground, self.connectivity)\n img[foreground != mask] = 0\n output = torch.unsqueeze(img, dim=channel_dim)\n else:\n # one-hot data is assumed to have binary value in each channel\n if self.independent:\n for i in self.applied_labels:\n foreground = img[:, i, ...].type(torch.uint8)\n mask = get_largest_connected_component_mask(foreground, self.connectivity)\n img[:, i, ...][foreground != mask] = 0\n else:\n applied_img = img[:, self.applied_labels, ...].type(torch.uint8)\n foreground = torch.any(applied_img, dim=channel_dim)\n mask = get_largest_connected_component_mask(foreground, self.connectivity)\n background_mask = torch.unsqueeze(foreground != mask, dim=channel_dim)\n background_mask = torch.repeat_interleave(background_mask, len(self.applied_labels), dim=channel_dim)\n applied_img[background_mask] = 0\n img[:, self.applied_labels, ...] 
= applied_img.type(img.type())\n output = img\n\n return output", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_LabelToContour_LabelToContour.__init__.self.kernel_type.kernel_type": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_LabelToContour_LabelToContour.__init__.self.kernel_type.kernel_type", "embedding": null, "metadata": {"file_path": "monai/transforms/post/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 318, "end_line": 334, "span_ids": ["LabelToContour", "LabelToContour.__init__"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LabelToContour(Transform):\n \"\"\"\n Return the contour of binary input images that only compose of 0 and 1, with Laplace kernel\n set as default for edge detection. Typical usage is to plot the edge of label or segmentation output.\n\n Args:\n kernel_type: the method applied to do edge detection, default is \"Laplace\".\n\n Raises:\n NotImplementedError: When ``kernel_type`` is not \"Laplace\".\n\n \"\"\"\n\n def __init__(self, kernel_type: str = \"Laplace\") -> None:\n if kernel_type != \"Laplace\":\n raise NotImplementedError('Currently only kernel_type=\"Laplace\" is supported.')\n self.kernel_type = kernel_type", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_LabelToContour.__call___LabelToContour.__call__.return.contour_img": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_LabelToContour.__call___LabelToContour.__call__.return.contour_img", "embedding": null, "metadata": {"file_path": "monai/transforms/post/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 336, "end_line": 366, "span_ids": ["LabelToContour.__call__"], "tokens": 428}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LabelToContour(Transform):\n\n def __call__(self, img: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n img: torch tensor data to extract the contour, with shape: [batch_size, channels, height, width[, depth]]\n\n Raises:\n ValueError: When ``image`` ndim is not one of [4, 5].\n\n Returns:\n A torch tensor with the same shape as img, note:\n 1. it's the binary classification result of whether a pixel is edge or not.\n 2. 
in order to keep the original shape of mask image, we use padding as default.\n 3. the edge detection is just approximate because it defects inherent to Laplace kernel,\n ideally the edge should be thin enough, but now it has a thickness.\n\n \"\"\"\n channels = img.shape[1]\n if img.ndimension() == 4:\n kernel = torch.tensor([[-1, -1, -1], [-1, 8, -1], [-1, -1, -1]], dtype=torch.float32, device=img.device)\n kernel = kernel.repeat(channels, 1, 1, 1)\n contour_img = F.conv2d(img, kernel, bias=None, stride=1, padding=1, dilation=1, groups=channels)\n elif img.ndimension() == 5:\n kernel = -1 * torch.ones(3, 3, 3, dtype=torch.float32, device=img.device)\n kernel[1, 1, 1] = 26\n kernel = kernel.repeat(channels, 1, 1, 1, 1)\n contour_img = F.conv3d(img, kernel, bias=None, stride=1, padding=1, dilation=1, groups=channels)\n else:\n raise ValueError(f\"Unsupported img dimension: {img.ndimension()}, available options are [4, 5].\")\n\n contour_img.clamp_(min=0.0, max=1.0)\n return contour_img", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_MeanEnsemble_MeanEnsemble.__init__.self.weights.torch_as_tensor_weights_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_MeanEnsemble_MeanEnsemble.__init__.self.weights.torch_as_tensor_weights_", "embedding": null, "metadata": {"file_path": "monai/transforms/post/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 369, "end_line": 393, "span_ids": ["MeanEnsemble", "MeanEnsemble.__init__"], "tokens": 420}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MeanEnsemble(Transform):\n \"\"\"\n Execute mean ensemble on the input data.\n The input data can be a list or tuple of PyTorch Tensor with shape: [B, C[, H, W, D]],\n Or a single PyTorch Tensor with shape: [E, B, C[, H, W, D]], the `E` dimension represents\n the output data from different models.\n Typically, the input data is model output of segmentation task or classification task.\n And it also can support to add `weights` for the input data.\n\n Args:\n weights: can be a list or tuple of numbers for input data with shape: [E, B, C, H, W[, D]].\n or a Numpy ndarray or a PyTorch Tensor data.\n the `weights` will be added to input data from highest dimension, for example:\n 1. if the `weights` only has 1 dimension, it will be added to the `E` dimension of input data.\n 2. 
if the `weights` has 3 dimensions, it will be added to `E`, `B` and `C` dimensions.\n it's a typical practice to add weights for different classes:\n to ensemble 3 segmentation model outputs, every output has 4 channels(classes),\n so the input data shape can be: [3, B, 4, H, W, D].\n and add different `weights` for different classes, so the `weights` shape can be: [3, 1, 4].\n for example: `weights = [[[1, 2, 3, 4]], [[4, 3, 2, 1]], [[1, 1, 1, 1]]]`.\n\n \"\"\"\n\n def __init__(self, weights: Optional[Union[Sequence[float], torch.Tensor, np.ndarray]] = None) -> None:\n self.weights = torch.as_tensor(weights, dtype=torch.float) if weights is not None else None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_Activationsd_Activationsd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_Activationsd_Activationsd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/post/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 63, "end_line": 101, "span_ids": ["Activationsd.__call__", "Activationsd", "Activationsd.__init__"], "tokens": 415}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Activationsd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.AddActivations`.\n Add activation layers to the input data specified by `keys`.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n sigmoid: Union[Sequence[bool], bool] = False,\n softmax: Union[Sequence[bool], bool] = False,\n other: Optional[Union[Sequence[Callable], Callable]] = None,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to model output and label.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n sigmoid: whether to execute sigmoid function on model output before transform.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n softmax: whether to execute softmax function on model output before transform.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n other: callable function to execute other activation layers,\n for example: `other = lambda x: torch.tanh(x)`. 
it also can be a sequence of Callable, each\n element corresponds to a key in ``keys``.\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.sigmoid = ensure_tuple_rep(sigmoid, len(self.keys))\n self.softmax = ensure_tuple_rep(softmax, len(self.keys))\n self.other = ensure_tuple_rep(other, len(self.keys))\n self.converter = Activations()\n\n def __call__(self, data: Mapping[Hashable, torch.Tensor]) -> Dict[Hashable, torch.Tensor]:\n d = dict(data)\n for key, sigmoid, softmax, other in self.key_iterator(d, self.sigmoid, self.softmax, self.other):\n d[key] = self.converter(d[key], sigmoid, softmax, other)\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_KeepLargestConnectedComponentd_KeepLargestConnectedComponentd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_KeepLargestConnectedComponentd_KeepLargestConnectedComponentd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/post/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 160, "end_line": 196, "span_ids": ["KeepLargestConnectedComponentd.__init__", "KeepLargestConnectedComponentd", "KeepLargestConnectedComponentd.__call__"], "tokens": 372}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class KeepLargestConnectedComponentd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.KeepLargestConnectedComponent`.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n applied_labels: Union[Sequence[int], int],\n independent: bool = True,\n connectivity: Optional[int] = None,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n applied_labels: Labels for applying the connected component on.\n If only one channel. The pixel whose value is not in this list will remain unchanged.\n If the data is in one-hot format, this is the channel indices to apply transform.\n independent: consider several labels as a whole or independent, default is `True`.\n Example use case would be segment label 1 is liver and label 2 is liver tumor, in that case\n you want this \"independent\" to be specified as False.\n connectivity: Maximum number of orthogonal hops to consider a pixel/voxel as a neighbor.\n Accepted values are ranging from 1 to input.ndim. 
If ``None``, a full\n connectivity of ``input.ndim`` is used.\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.converter = KeepLargestConnectedComponent(applied_labels, independent, connectivity)\n\n def __call__(self, data: Mapping[Hashable, torch.Tensor]) -> Dict[Hashable, torch.Tensor]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.converter(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_LabelToContourd_LabelToContourd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_LabelToContourd_LabelToContourd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/post/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 199, "end_line": 220, "span_ids": ["LabelToContourd.__init__", "LabelToContourd", "LabelToContourd.__call__"], "tokens": 208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LabelToContourd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.LabelToContour`.\n \"\"\"\n\n def __init__(self, keys: KeysCollection, kernel_type: str = \"Laplace\", allow_missing_keys: bool = False) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n kernel_type: the method applied to do edge detection, default is \"Laplace\".\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.converter = LabelToContour(kernel_type=kernel_type)\n\n def __call__(self, data: Mapping[Hashable, torch.Tensor]) -> Dict[Hashable, torch.Tensor]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.converter(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/__init__.py__": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/__init__.py__", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 11, "end_line": 11, "span_ids": [], "tokens": 0}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Spacing_Spacing.__init__.self.dtype.dtype": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Spacing_Spacing.__init__.self.dtype.dtype", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 77, "end_line": 122, "span_ids": ["Spacing.__init__", "Spacing"], "tokens": 522}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Spacing(Transform):\n \"\"\"\n Resample input image into the specified `pixdim`.\n \"\"\"\n\n def __init__(\n self,\n pixdim: Union[Sequence[float], float],\n diagonal: bool = False,\n mode: Union[GridSampleMode, str] = GridSampleMode.BILINEAR,\n padding_mode: Union[GridSamplePadMode, str] = GridSamplePadMode.BORDER,\n align_corners: bool = False,\n dtype: DtypeLike = np.float64,\n ) -> None:\n \"\"\"\n Args:\n pixdim: output voxel spacing.\n diagonal: whether to resample the input to have a diagonal affine matrix.\n If True, the input data is resampled to the following affine::\n\n np.diag((pixdim_0, pixdim_1, ..., pixdim_n, 1))\n\n This effectively resets the volume to the world coordinate system (RAS+ in nibabel).\n The original orientation, rotation, shearing are not preserved.\n\n If False, this transform preserves the axes orientation, orthogonal rotation and\n translation components from the original affine. This option will not flip/swap axes\n of the original data.\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``\"bilinear\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. Defaults to ``\"border\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n align_corners: Geometrically, we consider the pixels of the input as squares rather than points.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n dtype: data type for resampling computation. Defaults to ``np.float64`` for best precision.\n If None, use the data type of input data. 
To be compatible with other modules,\n the output data type is always ``np.float32``.\n \"\"\"\n self.pixdim = np.array(ensure_tuple(pixdim), dtype=np.float64)\n self.diagonal = diagonal\n self.mode: GridSampleMode = GridSampleMode(mode)\n self.padding_mode: GridSamplePadMode = GridSamplePadMode(padding_mode)\n self.align_corners = align_corners\n self.dtype = dtype", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Orientation_Orientation.__init__.self.labels.labels": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Orientation_Orientation.__init__.self.labels.labels", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 189, "end_line": 224, "span_ids": ["Orientation.__init__", "Orientation"], "tokens": 390}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Orientation(Transform):\n \"\"\"\n Change the input image's orientation into the specified based on `axcodes`.\n \"\"\"\n\n def __init__(\n self,\n axcodes: Optional[str] = None,\n as_closest_canonical: bool = False,\n labels: Optional[Sequence[Tuple[str, str]]] = tuple(zip(\"LPI\", \"RAS\")),\n ) -> None:\n \"\"\"\n Args:\n axcodes: N elements sequence for spatial ND input's orientation.\n e.g. axcodes='RAS' represents 3D orientation:\n (Left, Right), (Posterior, Anterior), (Inferior, Superior).\n default orientation labels options are: 'L' and 'R' for the first dimension,\n 'P' and 'A' for the second, 'I' and 'S' for the third.\n as_closest_canonical: if True, load the image as closest to canonical axis format.\n labels: optional, None or sequence of (2,) sequences\n (2,) sequences are labels for (beginning, end) of output axis.\n Defaults to ``(('L', 'R'), ('P', 'A'), ('I', 'S'))``.\n\n Raises:\n ValueError: When ``axcodes=None`` and ``as_closest_canonical=True``. 
Incompatible values.\n\n See Also: `nibabel.orientations.ornt2axcodes`.\n\n \"\"\"\n if axcodes is None and not as_closest_canonical:\n raise ValueError(\"Incompatible values: axcodes=None and as_closest_canonical=True.\")\n if axcodes is not None and as_closest_canonical:\n warnings.warn(\"using as_closest_canonical=True, axcodes ignored.\")\n self.axcodes = axcodes\n self.as_closest_canonical = as_closest_canonical\n self.labels = labels", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Orientation.__call___Orientation.__call__.return.data_array_affine_new_a": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Orientation.__call___Orientation.__call__.return.data_array_affine_new_a", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 246, "end_line": 291, "span_ids": ["Orientation.__call__"], "tokens": 494}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Orientation(Transform):\n\n def __call__(\n self, data_array: np.ndarray, affine: Optional[np.ndarray] = None\n ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:\n \"\"\"\n original orientation of `data_array` is defined by `affine`.\n\n Args:\n data_array: in shape (num_channels, H[, W, ...]).\n affine (matrix): (N+1)x(N+1) original affine matrix for spatially ND `data_array`. 
Defaults to identity.\n\n Raises:\n ValueError: When ``data_array`` has no spatial dimensions.\n ValueError: When ``axcodes`` spatiality differs from ``data_array``.\n\n Returns:\n data_array (reoriented in `self.axcodes`), original axcodes, current axcodes.\n\n \"\"\"\n sr = data_array.ndim - 1\n if sr <= 0:\n raise ValueError(\"data_array must have at least one spatial dimension.\")\n if affine is None:\n affine = np.eye(sr + 1, dtype=np.float64)\n affine_ = np.eye(sr + 1, dtype=np.float64)\n else:\n affine_ = to_affine_nd(sr, affine)\n src = nib.io_orientation(affine_)\n if self.as_closest_canonical:\n spatial_ornt = src\n else:\n if self.axcodes is None:\n raise AssertionError\n dst = nib.orientations.axcodes2ornt(self.axcodes[:sr], labels=self.labels)\n if len(dst) < sr:\n raise ValueError(\n f\"axcodes must match data_array spatially, got axcodes={len(self.axcodes)}D data_array={sr}D\"\n )\n spatial_ornt = nib.orientations.ornt_transform(src, dst)\n ornt = spatial_ornt.copy()\n ornt[:, 0] += 1 # skip channel dim\n ornt = np.concatenate([np.array([[0, 1]]), ornt])\n shape = data_array.shape[1:]\n data_array = np.ascontiguousarray(nib.orientations.apply_orientation(data_array, ornt))\n new_affine = affine_ @ nib.orientations.inv_ornt_aff(spatial_ornt, shape)\n new_affine = to_affine_nd(affine, new_affine)\n return data_array, affine, new_affine", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Resize_Resize.__init__.self.align_corners.align_corners": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Resize_Resize.__init__.self.align_corners.align_corners", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 322, "end_line": 348, "span_ids": ["Resize.__init__", "Resize"], "tokens": 329}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Resize(Transform):\n \"\"\"\n Resize the input image to given spatial size (with scaling, not cropping/padding).\n Implemented using :py:class:`torch.nn.functional.interpolate`.\n\n Args:\n spatial_size: expected shape of spatial dimensions after resize operation.\n if the components of the `spatial_size` are non-positive values, the transform will use the\n corresponding components of img size. For example, `spatial_size=(32, -1)` will be adapted\n to `(32, 64)` if the second spatial dimension size of img is `64`.\n mode: {``\"nearest\"``, ``\"linear\"``, ``\"bilinear\"``, ``\"bicubic\"``, ``\"trilinear\"``, ``\"area\"``}\n The interpolation mode. Defaults to ``\"area\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#interpolate\n align_corners: This only has an effect when mode is\n 'linear', 'bilinear', 'bicubic' or 'trilinear'. 
Default: None.\n See also: https://pytorch.org/docs/stable/nn.functional.html#interpolate\n \"\"\"\n\n def __init__(\n self,\n spatial_size: Union[Sequence[int], int],\n mode: Union[InterpolateMode, str] = InterpolateMode.AREA,\n align_corners: Optional[bool] = None,\n ) -> None:\n self.spatial_size = ensure_tuple(spatial_size)\n self.mode: InterpolateMode = InterpolateMode(mode)\n self.align_corners = align_corners", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Zoom_Zoom.__init__.self.keep_size.keep_size": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Zoom_Zoom.__init__.self.keep_size.keep_size", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 469, "end_line": 506, "span_ids": ["Zoom.__init__", "Zoom"], "tokens": 493}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Zoom(Transform):\n \"\"\"\n Zooms an ND image using :py:class:`torch.nn.functional.interpolate`.\n For details, please see https://pytorch.org/docs/stable/nn.functional.html#interpolate.\n\n Different from :py:class:`monai.transforms.resize`, this transform takes scaling factors\n as input, and provides an option of preserving the input spatial size.\n\n Args:\n zoom: The zoom factor along the spatial axes.\n If a float, zoom is the same for each spatial axis.\n If a sequence, zoom should contain one value for each spatial axis.\n mode: {``\"nearest\"``, ``\"linear\"``, ``\"bilinear\"``, ``\"bicubic\"``, ``\"trilinear\"``, ``\"area\"``}\n The interpolation mode. Defaults to ``\"area\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#interpolate\n padding_mode: {``\"constant\"``, ``\"edge``\", ``\"linear_ramp``\", ``\"maximum``\", ``\"mean``\", `\"median``\",\n ``\"minimum``\", `\"reflect``\", ``\"symmetric``\", ``\"wrap``\", ``\"empty``\", ``\"``\"}\n The mode to pad data after zooming.\n See also: https://numpy.org/doc/stable/reference/generated/numpy.pad.html\n align_corners: This only has an effect when mode is\n 'linear', 'bilinear', 'bicubic' or 'trilinear'. 
Default: None.\n See also: https://pytorch.org/docs/stable/nn.functional.html#interpolate\n keep_size: Should keep original size (padding/slicing if needed), default is True.\n \"\"\"\n\n def __init__(\n self,\n zoom: Union[Sequence[float], float],\n mode: Union[InterpolateMode, str] = InterpolateMode.AREA,\n padding_mode: Union[NumpyPadMode, str] = NumpyPadMode.EDGE,\n align_corners: Optional[bool] = None,\n keep_size: bool = True,\n ) -> None:\n self.zoom = zoom\n self.mode: InterpolateMode = InterpolateMode(mode)\n self.padding_mode: NumpyPadMode = NumpyPadMode(padding_mode)\n self.align_corners = align_corners\n self.keep_size = keep_size", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Zoom.__call___Zoom.__call__.return.zoomed_tuple_slice_vec_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Zoom.__call___Zoom.__call__.return.zoomed_tuple_slice_vec_", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 537, "end_line": 583, "span_ids": ["Zoom.__call__"], "tokens": 633}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Zoom(Transform):\n\n def __call__(\n self,\n img: np.ndarray,\n mode: Optional[Union[InterpolateMode, str]] = None,\n padding_mode: Optional[Union[NumpyPadMode, str]] = None,\n align_corners: Optional[bool] = None,\n ):\n \"\"\"\n Args:\n img: channel first array, must have shape: (num_channels, H[, W, ..., ]).\n mode: {``\"nearest\"``, ``\"linear\"``, ``\"bilinear\"``, ``\"bicubic\"``, ``\"trilinear\"``, ``\"area\"``}\n The interpolation mode. Defaults to ``self.mode``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#interpolate\n padding_mode: {``\"constant\"``, ``\"edge``\", ``\"linear_ramp``\", ``\"maximum``\", ``\"mean``\", `\"median``\",\n ``\"minimum``\", `\"reflect``\", ``\"symmetric``\", ``\"wrap``\", ``\"empty``\", ``\"``\"}\n The mode to pad data after zooming, default to ``self.padding_mode``.\n See also: https://numpy.org/doc/stable/reference/generated/numpy.pad.html\n align_corners: This only has an effect when mode is\n 'linear', 'bilinear', 'bicubic' or 'trilinear'. 
Defaults to ``self.align_corners``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#interpolate\n\n \"\"\"\n _zoom = ensure_tuple_rep(self.zoom, img.ndim - 1) # match the spatial image dim\n zoomed = torch.nn.functional.interpolate( # type: ignore\n recompute_scale_factor=True,\n input=torch.as_tensor(np.ascontiguousarray(img), dtype=torch.float).unsqueeze(0),\n scale_factor=list(_zoom),\n mode=self.mode.value if mode is None else InterpolateMode(mode).value,\n align_corners=self.align_corners if align_corners is None else align_corners,\n )\n zoomed = zoomed.squeeze(0).detach().cpu().numpy()\n if not self.keep_size or np.allclose(img.shape, zoomed.shape):\n return zoomed\n\n pad_vec = [[0, 0]] * len(img.shape)\n slice_vec = [slice(None)] * len(img.shape)\n for idx, (od, zd) in enumerate(zip(img.shape, zoomed.shape)):\n diff = od - zd\n half = abs(diff) // 2\n if diff > 0: # need padding\n pad_vec[idx] = [half, diff - half]\n elif diff < 0: # need slicing\n slice_vec[idx] = slice(half, half + od)\n\n padding_mode = self.padding_mode if padding_mode is None else NumpyPadMode(padding_mode)\n zoomed = np.pad(zoomed, pad_vec, mode=padding_mode.value)\n return zoomed[tuple(slice_vec)]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandRotate90_RandRotate90.__call__.return.rotator_img_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandRotate90_RandRotate90.__call__.return.rotator_img_", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 618, "end_line": 652, "span_ids": ["RandRotate90.randomize", "RandRotate90", "RandRotate90.__init__", "RandRotate90.__call__"], "tokens": 338}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandRotate90(RandomizableTransform):\n \"\"\"\n With probability `prob`, input arrays are rotated by 90 degrees\n in the plane specified by `spatial_axes`.\n \"\"\"\n\n def __init__(self, prob: float = 0.1, max_k: int = 3, spatial_axes: Tuple[int, int] = (0, 1)) -> None:\n \"\"\"\n Args:\n prob: probability of rotating.\n (Default 0.1, with 10% probability it returns a rotated array)\n max_k: number of rotations will be sampled from `np.random.randint(max_k) + 1`, (Default 3).\n spatial_axes: 2 int numbers, defines the plane to rotate with 2 spatial axes.\n Default: (0, 1), this is the first two axis in spatial dimensions.\n \"\"\"\n RandomizableTransform.__init__(self, prob)\n self.max_k = max_k\n self.spatial_axes = spatial_axes\n\n self._rand_k = 0\n\n def randomize(self, data: Optional[Any] = None) -> None:\n self._rand_k = self.R.randint(self.max_k) + 1\n super().randomize(None)\n\n def __call__(self, img: np.ndarray) -> np.ndarray:\n \"\"\"\n Args:\n img: channel first array, must have shape: (num_channels, H[, W, ..., ]),\n \"\"\"\n self.randomize()\n if not 
self._do_transform:\n return img\n rotator = Rotate90(self._rand_k, self.spatial_axes)\n return rotator(img)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandFlip_RandFlip.__call__.return.self_flipper_img_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandFlip_RandFlip.__call__.return.self_flipper_img_", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 759, "end_line": 782, "span_ids": ["RandFlip", "RandFlip.__init__", "RandFlip.__call__"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandFlip(RandomizableTransform):\n \"\"\"\n Randomly flips the image along axes. Preserves shape.\n See numpy.flip for additional details.\n https://docs.scipy.org/doc/numpy/reference/generated/numpy.flip.html\n\n Args:\n prob: Probability of flipping.\n spatial_axis: Spatial axes along which to flip over. Default is None.\n \"\"\"\n\n def __init__(self, prob: float = 0.1, spatial_axis: Optional[Union[Sequence[int], int]] = None) -> None:\n RandomizableTransform.__init__(self, prob)\n self.flipper = Flip(spatial_axis=spatial_axis)\n\n def __call__(self, img: np.ndarray) -> np.ndarray:\n \"\"\"\n Args:\n img: channel first array, must have shape: (num_channels, H[, W, ..., ]),\n \"\"\"\n self.randomize(None)\n if not self._do_transform:\n return img\n return self.flipper(img)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandDeformGrid_RandDeformGrid.randomize.self.rand_mag.self_R_uniform_self_magni": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandDeformGrid_RandDeformGrid.randomize.self.rand_mag.self_R_uniform_self_magni", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1108, "end_line": 1142, "span_ids": ["RandDeformGrid", "RandDeformGrid.__init__", "RandDeformGrid.randomize"], "tokens": 316}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandDeformGrid(Randomizable):\n \"\"\"\n Generate random deformation grid.\n \"\"\"\n\n def __init__(\n self,\n spacing: Union[Sequence[float], float],\n 
magnitude_range: Tuple[float, float],\n as_tensor_output: bool = True,\n device: Optional[torch.device] = None,\n ) -> None:\n \"\"\"\n Args:\n spacing: spacing of the grid in 2D or 3D.\n e.g., spacing=(1, 1) indicates pixel-wise deformation in 2D,\n spacing=(1, 1, 1) indicates voxel-wise deformation in 3D,\n spacing=(2, 2) indicates deformation field defined on every other pixel in 2D.\n magnitude_range: the random offsets will be generated from\n `uniform[magnitude[0], magnitude[1])`.\n as_tensor_output: whether to output tensor instead of numpy array.\n defaults to True.\n device: device to store the output grid data.\n \"\"\"\n self.spacing = spacing\n self.magnitude = magnitude_range\n\n self.rand_mag = 1.0\n self.as_tensor_output = as_tensor_output\n self.random_offset = 0.0\n self.device = device\n\n def randomize(self, grid_size: Sequence[int]) -> None:\n self.random_offset = self.R.normal(size=([len(grid_size)] + list(grid_size))).astype(np.float32)\n self.rand_mag = self.R.uniform(self.magnitude[0], self.magnitude[1])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandDeformGrid.__call___RandDeformGrid.__call__.return.control_grid": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandDeformGrid.__call___RandDeformGrid.__call__.return.control_grid", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1144, "end_line": 1155, "span_ids": ["RandDeformGrid.__call__"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandDeformGrid(Randomizable):\n\n def __call__(self, spatial_size: Sequence[int]):\n \"\"\"\n Args:\n spatial_size: spatial size of the grid.\n \"\"\"\n self.spacing = fall_back_tuple(self.spacing, (1.0,) * len(spatial_size))\n control_grid = create_control_grid(spatial_size, self.spacing)\n self.randomize(control_grid.shape[1:])\n control_grid[: len(spatial_size)] += self.rand_mag * self.random_offset\n if self.as_tensor_output:\n control_grid = torch.as_tensor(np.ascontiguousarray(control_grid), device=self.device)\n return control_grid", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Resample_Resample.__init__.self.device.device": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Resample_Resample.__init__.self.device.device", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", 
"start_line": 987, "end_line": 1012, "span_ids": ["Resample.__init__", "Resample"], "tokens": 310}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Resample(Transform):\n def __init__(\n self,\n mode: Union[GridSampleMode, str] = GridSampleMode.BILINEAR,\n padding_mode: Union[GridSamplePadMode, str] = GridSamplePadMode.BORDER,\n as_tensor_output: bool = False,\n device: Optional[torch.device] = None,\n ) -> None:\n \"\"\"\n computes output image using values from `img`, locations from `grid` using pytorch.\n supports spatially 2D or 3D (num_channels, H, W[, D]).\n\n Args:\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``\"bilinear\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. Defaults to ``\"border\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n as_tensor_output: whether to return a torch tensor. Defaults to False.\n device: device on which the tensor will be allocated.\n \"\"\"\n self.mode: GridSampleMode = GridSampleMode(mode)\n self.padding_mode: GridSamplePadMode = GridSamplePadMode(padding_mode)\n self.as_tensor_output = as_tensor_output\n self.device = device", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Affine_Affine.__init__.self.padding_mode.GridSamplePadMode_padding": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Affine_Affine.__init__.self.padding_mode.GridSamplePadMode_padding", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1059, "end_line": 1113, "span_ids": ["Affine", "Affine.__init__"], "tokens": 711}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Affine(Transform):\n \"\"\"\n Transform ``img`` given the affine parameters.\n \"\"\"\n\n def __init__(\n self,\n rotate_params: Optional[Union[Sequence[float], float]] = None,\n shear_params: Optional[Union[Sequence[float], float]] = None,\n translate_params: Optional[Union[Sequence[float], float]] = None,\n scale_params: Optional[Union[Sequence[float], float]] = None,\n spatial_size: Optional[Union[Sequence[int], int]] = None,\n mode: Union[GridSampleMode, str] = GridSampleMode.BILINEAR,\n padding_mode: Union[GridSamplePadMode, str] = GridSamplePadMode.REFLECTION,\n as_tensor_output: bool = False,\n device: Optional[torch.device] = None,\n ) -> None:\n \"\"\"\n The affine transformations are applied in rotate, shear, 
translate, scale order.\n\n Args:\n rotate_params: a rotation angle in radians, a scalar for 2D image, a tuple of 3 floats for 3D.\n Defaults to no rotation.\n shear_params: a tuple of 2 floats for 2D, a tuple of 6 floats for 3D. Defaults to no shearing.\n translate_params: a tuple of 2 floats for 2D, a tuple of 3 floats for 3D. Translation is in\n pixel/voxel relative to the center of the input image. Defaults to no translation.\n scale_params: a tuple of 2 floats for 2D, a tuple of 3 floats for 3D. Defaults to no scaling.\n spatial_size: output image spatial size.\n if `spatial_size` and `self.spatial_size` are not defined, or smaller than 1,\n the transform will use the spatial size of `img`.\n if the components of the `spatial_size` are non-positive values, the transform will use the\n corresponding components of img size. For example, `spatial_size=(32, -1)` will be adapted\n to `(32, 64)` if the second spatial dimension size of img is `64`.\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``\"bilinear\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. Defaults to ``\"reflection\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n as_tensor_output: the computation is implemented using pytorch tensors, this option specifies\n whether to convert it back to numpy arrays.\n device: device on which the tensor will be allocated.\n \"\"\"\n self.affine_grid = AffineGrid(\n rotate_params=rotate_params,\n shear_params=shear_params,\n translate_params=translate_params,\n scale_params=scale_params,\n as_tensor_output=True,\n device=device,\n )\n self.resampler = Resample(as_tensor_output=as_tensor_output, device=device)\n self.spatial_size = spatial_size\n self.mode: GridSampleMode = GridSampleMode(mode)\n self.padding_mode: GridSamplePadMode = GridSamplePadMode(padding_mode)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandAffine.set_random_state_RandAffine.__call__.return.self_resampler_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandAffine.set_random_state_RandAffine.__call__.return.self_resampler_", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1406, "end_line": 1447, "span_ids": ["RandAffine.randomize", "RandAffine.__call__", "RandAffine.set_random_state"], "tokens": 505}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandAffine(RandomizableTransform):\n\n def set_random_state(\n self, seed: Optional[int] = None, state: Optional[np.random.RandomState] = None\n ) -> \"RandAffine\":\n self.rand_affine_grid.set_random_state(seed, state)\n super().set_random_state(seed, 
state)\n return self\n\n def randomize(self, data: Optional[Any] = None) -> None:\n super().randomize(None)\n self.rand_affine_grid.randomize()\n\n def __call__(\n self,\n img: Union[np.ndarray, torch.Tensor],\n spatial_size: Optional[Union[Sequence[int], int]] = None,\n mode: Optional[Union[GridSampleMode, str]] = None,\n padding_mode: Optional[Union[GridSamplePadMode, str]] = None,\n ) -> Union[np.ndarray, torch.Tensor]:\n \"\"\"\n Args:\n img: shape must be (num_channels, H, W[, D]),\n spatial_size: output image spatial size.\n if `spatial_size` and `self.spatial_size` are not defined, or smaller than 1,\n the transform will use the spatial size of `img`.\n if `img` has two spatial dimensions, `spatial_size` should have 2 elements [h, w].\n if `img` has three spatial dimensions, `spatial_size` should have 3 elements [h, w, d].\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``self.mode``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. Defaults to ``self.padding_mode``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n \"\"\"\n self.randomize()\n sp_size = fall_back_tuple(spatial_size or self.spatial_size, img.shape[1:])\n if self._do_transform:\n grid = self.rand_affine_grid(spatial_size=sp_size)\n else:\n grid = create_grid(spatial_size=sp_size)\n return self.resampler(\n img=img, grid=grid, mode=mode or self.mode, padding_mode=padding_mode or self.padding_mode\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand2DElastic.set_random_state_Rand2DElastic.randomize.self_rand_affine_grid_ran": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand2DElastic.set_random_state_Rand2DElastic.randomize.self_rand_affine_grid_ran", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1525, "end_line": 1536, "span_ids": ["Rand2DElastic.randomize", "Rand2DElastic.set_random_state"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Rand2DElastic(RandomizableTransform):\n\n def set_random_state(\n self, seed: Optional[int] = None, state: Optional[np.random.RandomState] = None\n ) -> \"Rand2DElastic\":\n self.deform_grid.set_random_state(seed, state)\n self.rand_affine_grid.set_random_state(seed, state)\n super().set_random_state(seed, state)\n return self\n\n def randomize(self, spatial_size: Sequence[int]) -> None:\n super().randomize(None)\n self.deform_grid.randomize(spatial_size)\n self.rand_affine_grid.randomize()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", 
"class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand2DElastic.__call___Rand2DElastic.__call__.return.self_resampler_img_grid_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand2DElastic.__call___Rand2DElastic.__call__.return.self_resampler_img_grid_", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1538, "end_line": 1573, "span_ids": ["Rand2DElastic.__call__"], "tokens": 454}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Rand2DElastic(RandomizableTransform):\n\n def __call__(\n self,\n img: Union[np.ndarray, torch.Tensor],\n spatial_size: Optional[Union[Tuple[int, int], int]] = None,\n mode: Optional[Union[GridSampleMode, str]] = None,\n padding_mode: Optional[Union[GridSamplePadMode, str]] = None,\n ) -> Union[np.ndarray, torch.Tensor]:\n \"\"\"\n Args:\n img: shape must be (num_channels, H, W),\n spatial_size: specifying output image spatial size [h, w].\n if `spatial_size` and `self.spatial_size` are not defined, or smaller than 1,\n the transform will use the spatial size of `img`.\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``self.mode``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. 
Defaults to ``self.padding_mode``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n \"\"\"\n sp_size = fall_back_tuple(spatial_size or self.spatial_size, img.shape[1:])\n self.randomize(spatial_size=sp_size)\n if self._do_transform:\n grid = self.deform_grid(spatial_size=sp_size)\n grid = self.rand_affine_grid(grid=grid)\n grid = torch.nn.functional.interpolate( # type: ignore\n recompute_scale_factor=True,\n input=torch.as_tensor(grid).unsqueeze(0),\n scale_factor=list(ensure_tuple(self.deform_grid.spacing)),\n mode=InterpolateMode.BICUBIC.value,\n align_corners=False,\n )\n grid = CenterSpatialCrop(roi_size=sp_size)(grid[0])\n else:\n grid = create_grid(spatial_size=sp_size)\n return self.resampler(img, grid, mode=mode or self.mode, padding_mode=padding_mode or self.padding_mode)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand3DElastic_Rand3DElastic.__init__.self.sigma.1_0": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand3DElastic_Rand3DElastic.__init__.self.sigma.1_0", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1576, "end_line": 1648, "span_ids": ["Rand3DElastic.__init__", "Rand3DElastic"], "tokens": 984}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Rand3DElastic(RandomizableTransform):\n \"\"\"\n Random elastic deformation and affine in 3D\n \"\"\"\n\n def __init__(\n self,\n sigma_range: Tuple[float, float],\n magnitude_range: Tuple[float, float],\n prob: float = 0.1,\n rotate_range: RandRange = None,\n shear_range: RandRange = None,\n translate_range: RandRange = None,\n scale_range: RandRange = None,\n spatial_size: Optional[Union[Tuple[int, int, int], int]] = None,\n mode: Union[GridSampleMode, str] = GridSampleMode.BILINEAR,\n padding_mode: Union[GridSamplePadMode, str] = GridSamplePadMode.REFLECTION,\n as_tensor_output: bool = False,\n device: Optional[torch.device] = None,\n ) -> None:\n \"\"\"\n Args:\n sigma_range: a Gaussian kernel with standard deviation sampled from\n ``uniform[sigma_range[0], sigma_range[1])`` will be used to smooth the random offset grid.\n magnitude_range: the random offsets on the grid will be generated from\n ``uniform[magnitude[0], magnitude[1])``.\n prob: probability of returning a randomized elastic transform.\n defaults to 0.1, with 10% chance returns a randomized elastic transform,\n otherwise returns a ``spatial_size`` centered area extracted from the input image.\n rotate_range: angle range in radians. If element `i` is iterable, then\n `uniform[-rotate_range[i][0], rotate_range[i][1])` will be used to generate the rotation parameter\n for the ith dimension. If not, `uniform[-rotate_range[i], rotate_range[i])` will be used. This can\n be altered on a per-dimension basis. 
E.g., `((0,3), 1, ...)`: for dim0, rotation will be in range\n `[0, 3]`, and for dim1 `[-1, 1]` will be used. Setting a single value will use `[-x, x]` for dim0\n and nothing for the remaining dimensions.\n shear_range: shear_range with format matching `rotate_range`.\n translate_range: translate_range with format matching `rotate_range`.\n scale_range: scaling_range with format matching `rotate_range`. A value of 1.0 is added to the result.\n This allows 0 to correspond to no change (i.e., a scaling of 1).\n spatial_size: specifying output image spatial size [h, w, d].\n if `spatial_size` and `self.spatial_size` are not defined, or smaller than 1,\n the transform will use the spatial size of `img`.\n if the components of the `spatial_size` are non-positive values, the transform will use the\n corresponding components of img size. For example, `spatial_size=(32, 32, -1)` will be adapted\n to `(32, 32, 64)` if the third spatial dimension size of img is `64`.\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``\"bilinear\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. Defaults to ``\"reflection\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n as_tensor_output: the computation is implemented using pytorch tensors, this option specifies\n whether to convert it back to numpy arrays.\n device: device on which the tensor will be allocated.\n\n See also:\n - :py:class:`RandAffineGrid` for the random affine parameters configurations.\n - :py:class:`Affine` for the affine transformation parameters configurations.\n \"\"\"\n RandomizableTransform.__init__(self, prob)\n self.rand_affine_grid = RandAffineGrid(rotate_range, shear_range, translate_range, scale_range, True, device)\n self.resampler = Resample(as_tensor_output=as_tensor_output, device=device)\n\n self.sigma_range = sigma_range\n self.magnitude_range = magnitude_range\n self.spatial_size = spatial_size\n self.mode: GridSampleMode = GridSampleMode(mode)\n self.padding_mode: GridSamplePadMode = GridSamplePadMode(padding_mode)\n self.device = device\n\n self.rand_offset = None\n self.magnitude = 1.0\n self.sigma = 1.0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand3DElastic.set_random_state_Rand3DElastic.randomize.self_rand_affine_grid_ran": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand3DElastic.set_random_state_Rand3DElastic.randomize.self_rand_affine_grid_ran", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1650, "end_line": 1663, "span_ids": ["Rand3DElastic.randomize", "Rand3DElastic.set_random_state"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "class Rand3DElastic(RandomizableTransform):\n\n def set_random_state(\n self, seed: Optional[int] = None, state: Optional[np.random.RandomState] = None\n ) -> \"Rand3DElastic\":\n self.rand_affine_grid.set_random_state(seed, state)\n super().set_random_state(seed, state)\n return self\n\n def randomize(self, grid_size: Sequence[int]) -> None:\n super().randomize(None)\n if self._do_transform:\n self.rand_offset = self.R.uniform(-1.0, 1.0, [3] + list(grid_size)).astype(np.float32)\n self.magnitude = self.R.uniform(self.magnitude_range[0], self.magnitude_range[1])\n self.sigma = self.R.uniform(self.sigma_range[0], self.sigma_range[1])\n self.rand_affine_grid.randomize()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand3DElastic.__call___": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_Rand3DElastic.__call___", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1665, "end_line": 1697, "span_ids": ["Rand3DElastic.__call__"], "tokens": 448}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Rand3DElastic(RandomizableTransform):\n\n def __call__(\n self,\n img: Union[np.ndarray, torch.Tensor],\n spatial_size: Optional[Union[Tuple[int, int, int], int]] = None,\n mode: Optional[Union[GridSampleMode, str]] = None,\n padding_mode: Optional[Union[GridSamplePadMode, str]] = None,\n ) -> Union[np.ndarray, torch.Tensor]:\n \"\"\"\n Args:\n img: shape must be (num_channels, H, W, D),\n spatial_size: specifying spatial 3D output image spatial size [h, w, d].\n if `spatial_size` and `self.spatial_size` are not defined, or smaller than 1,\n the transform will use the spatial size of `img`.\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``self.mode``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. 
Defaults to ``self.padding_mode``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n \"\"\"\n sp_size = fall_back_tuple(spatial_size or self.spatial_size, img.shape[1:])\n self.randomize(grid_size=sp_size)\n grid = create_grid(spatial_size=sp_size)\n if self._do_transform:\n if self.rand_offset is None:\n raise AssertionError\n grid = torch.as_tensor(np.ascontiguousarray(grid), device=self.device)\n gaussian = GaussianFilter(3, self.sigma, 3.0).to(device=self.device)\n offset = torch.as_tensor(self.rand_offset, device=self.device).unsqueeze(0)\n grid[:3] += gaussian(offset)[0] * self.magnitude\n grid = self.rand_affine_grid(grid=grid)\n return self.resampler(img, grid, mode=mode or self.mode, padding_mode=padding_mode or self.padding_mode)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Spacingd.__call___Spacingd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Spacingd.__call___Spacingd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 193, "end_line": 227, "span_ids": ["Spacingd.__call__"], "tokens": 348}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Spacingd(MapTransform, InvertibleTransform):\n\n def __call__(\n self, data: Mapping[Union[Hashable, str], Dict[str, np.ndarray]]\n ) -> Dict[Union[Hashable, str], Union[np.ndarray, Dict[str, np.ndarray]]]:\n d: Dict = dict(data)\n for key, mode, padding_mode, align_corners, dtype in self.key_iterator(\n d, self.mode, self.padding_mode, self.align_corners, self.dtype\n ):\n meta_data_key = f\"{key}_{self.meta_key_postfix}\"\n meta_data = d[meta_data_key]\n # resample array of each corresponding key\n # using affine fetched from d[affine_key]\n original_spatial_shape = d[key].shape[1:]\n d[key], old_affine, new_affine = self.spacing_transform(\n data_array=np.asarray(d[key]),\n affine=meta_data[\"affine\"],\n mode=mode,\n padding_mode=padding_mode,\n align_corners=align_corners,\n dtype=dtype,\n )\n self.push_transform(\n d,\n key,\n extra_info={\n \"meta_data_key\": meta_data_key,\n \"old_affine\": old_affine,\n \"mode\": mode.value if isinstance(mode, Enum) else mode,\n \"padding_mode\": padding_mode.value if isinstance(padding_mode, Enum) else padding_mode,\n \"align_corners\": align_corners if align_corners is not None else \"none\",\n },\n orig_size=original_spatial_shape,\n )\n # set the 'affine' key\n meta_data[\"affine\"] = new_affine\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rotate90d_Rotate90d.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rotate90d_Rotate90d.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 348, "end_line": 371, "span_ids": ["Rotate90d.__call__", "Rotate90d.__init__", "Rotate90d"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Rotate90d(MapTransform, InvertibleTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.Rotate90`.\n \"\"\"\n\n def __init__(\n self, keys: KeysCollection, k: int = 1, spatial_axes: Tuple[int, int] = (0, 1), allow_missing_keys: bool = False\n ) -> None:\n \"\"\"\n Args:\n k: number of times to rotate by 90 degrees.\n spatial_axes: 2 int numbers, defines the plane to rotate with 2 spatial axes.\n Default: (0, 1), this is the first two axis in spatial dimensions.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.rotator = Rotate90(k, spatial_axes)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n self.push_transform(d, key)\n d[key] = self.rotator(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotate90d_RandRotate90d.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotate90d_RandRotate90d.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 393, "end_line": 441, "span_ids": ["RandRotate90d", "RandRotate90d.__call__", "RandRotate90d.__init__", "RandRotate90d.randomize"], "tokens": 458}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandRotate90d(RandomizableTransform, MapTransform, InvertibleTransform):\n \"\"\"\n Dictionary-based version :py:class:`monai.transforms.RandRotate90`.\n With probability `prob`, input arrays are rotated by 90 degrees\n in the plane specified by `spatial_axes`.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n prob: float = 0.1,\n max_k: int = 3,\n spatial_axes: Tuple[int, int] = (0, 1),\n allow_missing_keys: 
bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n prob: probability of rotating.\n (Default 0.1, with 10% probability it returns a rotated array.)\n max_k: number of rotations will be sampled from `np.random.randint(max_k) + 1`.\n (Default 3)\n spatial_axes: 2 int numbers, defines the plane to rotate with 2 spatial axes.\n Default: (0, 1), this is the first two axis in spatial dimensions.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n MapTransform.__init__(self, keys, allow_missing_keys)\n RandomizableTransform.__init__(self, prob)\n\n self.max_k = max_k\n self.spatial_axes = spatial_axes\n\n self._rand_k = 0\n\n def randomize(self, data: Optional[Any] = None) -> None:\n self._rand_k = self.R.randint(self.max_k) + 1\n super().randomize(None)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Mapping[Hashable, np.ndarray]:\n self.randomize()\n d = dict(data)\n\n rotator = Rotate90(self._rand_k, self.spatial_axes)\n for key in self.key_iterator(d):\n if self._do_transform:\n d[key] = rotator(d[key])\n self.push_transform(d, key, extra_info={\"rand_k\": self._rand_k})\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandAffined_RandAffined.__init__.self.padding_mode.ensure_tuple_rep_padding_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandAffined_RandAffined.__init__.self.padding_mode.ensure_tuple_rep_padding_", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 640, "end_line": 711, "span_ids": ["RandAffined.__init__", "RandAffined"], "tokens": 998}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandAffined(RandomizableTransform, MapTransform, InvertibleTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.RandAffine`.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n spatial_size: Optional[Union[Sequence[int], int]] = None,\n prob: float = 0.1,\n rotate_range: Optional[Union[Sequence[Union[Tuple[float, float], float]], float]] = None,\n shear_range: Optional[Union[Sequence[Union[Tuple[float, float], float]], float]] = None,\n translate_range: Optional[Union[Sequence[Union[Tuple[float, float], float]], float]] = None,\n scale_range: Optional[Union[Sequence[Union[Tuple[float, float], float]], float]] = None,\n mode: GridSampleModeSequence = GridSampleMode.BILINEAR,\n padding_mode: GridSamplePadModeSequence = GridSamplePadMode.REFLECTION,\n as_tensor_output: bool = True,\n device: Optional[torch.device] = None,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n spatial_size: output image spatial size.\n if 
`spatial_size` and `self.spatial_size` are not defined, or smaller than 1,\n the transform will use the spatial size of `img`.\n if the components of the `spatial_size` are non-positive values, the transform will use the\n corresponding components of img size. For example, `spatial_size=(32, -1)` will be adapted\n to `(32, 64)` if the second spatial dimension size of img is `64`.\n prob: probability of returning a randomized affine grid.\n defaults to 0.1, with 10% chance returns a randomized grid.\n rotate_range: angle range in radians. If element `i` is iterable, then\n `uniform[-rotate_range[i][0], rotate_range[i][1])` will be used to generate the rotation parameter\n for the ith dimension. If not, `uniform[-rotate_range[i], rotate_range[i])` will be used. This can\n be altered on a per-dimension basis. E.g., `((0,3), 1, ...)`: for dim0, rotation will be in range\n `[0, 3]`, and for dim1 `[-1, 1]` will be used. Setting a single value will use `[-x, x]` for dim0\n and nothing for the remaining dimensions.\n shear_range: shear_range with format matching `rotate_range`.\n translate_range: translate_range with format matching `rotate_range`.\n scale_range: scaling_range with format matching `rotate_range`. A value of 1.0 is added to the result.\n This allows 0 to correspond to no change (i.e., a scaling of 1).\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``\"bilinear\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. Defaults to ``\"reflection\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n as_tensor_output: the computation is implemented using pytorch tensors, this option specifies\n whether to convert it back to numpy arrays.\n device: device on which the tensor will be allocated.\n allow_missing_keys: don't raise exception if key is missing.\n\n See also:\n - :py:class:`monai.transforms.compose.MapTransform`\n - :py:class:`RandAffineGrid` for the random affine parameters configurations.\n \"\"\"\n MapTransform.__init__(self, keys, allow_missing_keys)\n RandomizableTransform.__init__(self, prob)\n self.rand_affine = RandAffine(\n prob=1.0, # because probability handled in this class\n rotate_range=rotate_range,\n shear_range=shear_range,\n translate_range=translate_range,\n scale_range=scale_range,\n spatial_size=spatial_size,\n as_tensor_output=as_tensor_output,\n device=device,\n )\n self.mode = ensure_tuple_rep(mode, len(self.keys))\n self.padding_mode = ensure_tuple_rep(padding_mode, len(self.keys))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandAffined.set_random_state_RandAffined.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandAffined.set_random_state_RandAffined.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", 
"file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 713, "end_line": 750, "span_ids": ["RandAffined.__call__", "RandAffined.set_random_state", "RandAffined.randomize"], "tokens": 392}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandAffined(RandomizableTransform, MapTransform, InvertibleTransform):\n\n def set_random_state(\n self, seed: Optional[int] = None, state: Optional[np.random.RandomState] = None\n ) -> \"RandAffined\":\n self.rand_affine.set_random_state(seed, state)\n super().set_random_state(seed, state)\n return self\n\n def randomize(self, data: Optional[Any] = None) -> None:\n super().randomize(None)\n self.rand_affine.randomize()\n\n def __call__(\n self, data: Mapping[Hashable, Union[np.ndarray, torch.Tensor]]\n ) -> Dict[Hashable, Union[np.ndarray, torch.Tensor]]:\n d = dict(data)\n self.randomize()\n\n sp_size = fall_back_tuple(self.rand_affine.spatial_size, data[self.keys[0]].shape[1:])\n if self._do_transform:\n grid = self.rand_affine.rand_affine_grid(spatial_size=sp_size)\n affine = self.rand_affine.rand_affine_grid.get_transformation_matrix()\n else:\n grid = create_grid(spatial_size=sp_size)\n # to be consistent with the self._do_transform case (dtype and device)\n affine = torch.as_tensor(np.eye(len(sp_size) + 1), device=self.rand_affine.rand_affine_grid.device)\n\n for key, mode, padding_mode in self.key_iterator(d, self.mode, self.padding_mode):\n self.push_transform(\n d,\n key,\n extra_info={\n \"affine\": affine,\n \"mode\": mode.value if isinstance(mode, Enum) else mode,\n \"padding_mode\": padding_mode.value if isinstance(padding_mode, Enum) else padding_mode,\n },\n )\n d[key] = self.rand_affine.resampler(d[key], grid, mode=mode, padding_mode=padding_mode)\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rand2DElasticd_Rand2DElasticd.__init__.self.padding_mode.ensure_tuple_rep_padding_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rand2DElasticd_Rand2DElasticd.__init__.self.padding_mode.ensure_tuple_rep_padding_", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 779, "end_line": 858, "span_ids": ["Rand2DElasticd", "Rand2DElasticd.__init__"], "tokens": 1101}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Rand2DElasticd(RandomizableTransform, MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.Rand2DElastic`.\n \"\"\"\n\n def __init__(\n self,\n keys: 
KeysCollection,\n spacing: Union[Tuple[float, float], float],\n magnitude_range: Tuple[float, float],\n spatial_size: Optional[Union[Tuple[int, int], int]] = None,\n prob: float = 0.1,\n rotate_range: Optional[Union[Sequence[Union[Tuple[float, float], float]], float]] = None,\n shear_range: Optional[Union[Sequence[Union[Tuple[float, float], float]], float]] = None,\n translate_range: Optional[Union[Sequence[Union[Tuple[float, float], float]], float]] = None,\n scale_range: Optional[Union[Sequence[Union[Tuple[float, float], float]], float]] = None,\n mode: GridSampleModeSequence = GridSampleMode.BILINEAR,\n padding_mode: GridSamplePadModeSequence = GridSamplePadMode.REFLECTION,\n as_tensor_output: bool = False,\n device: Optional[torch.device] = None,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n spacing: distance in between the control points.\n magnitude_range: 2 int numbers, the random offsets will be generated from\n ``uniform[magnitude[0], magnitude[1])``.\n spatial_size: specifying output image spatial size [h, w].\n if `spatial_size` and `self.spatial_size` are not defined, or smaller than 1,\n the transform will use the spatial size of `img`.\n if the components of the `spatial_size` are non-positive values, the transform will use the\n corresponding components of img size. For example, `spatial_size=(32, -1)` will be adapted\n to `(32, 64)` if the second spatial dimension size of img is `64`.\n prob: probability of returning a randomized affine grid.\n defaults to 0.1, with 10% chance returns a randomized grid,\n otherwise returns a ``spatial_size`` centered area extracted from the input image.\n rotate_range: angle range in radians. If element `i` is iterable, then\n `uniform[-rotate_range[i][0], rotate_range[i][1])` will be used to generate the rotation parameter\n for the ith dimension. If not, `uniform[-rotate_range[i], rotate_range[i])` will be used. This can\n be altered on a per-dimension basis. E.g., `((0,3), 1, ...)`: for dim0, rotation will be in range\n `[0, 3]`, and for dim1 `[-1, 1]` will be used. Setting a single value will use `[-x, x]` for dim0\n and nothing for the remaining dimensions.\n shear_range: shear_range with format matching `rotate_range`.\n translate_range: translate_range with format matching `rotate_range`.\n scale_range: scaling_range with format matching `rotate_range`. A value of 1.0 is added to the result.\n This allows 0 to correspond to no change (i.e., a scaling of 1).\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``\"bilinear\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. 
Defaults to ``\"reflection\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n as_tensor_output: the computation is implemented using pytorch tensors, this option specifies\n whether to convert it back to numpy arrays.\n device: device on which the tensor will be allocated.\n allow_missing_keys: don't raise exception if key is missing.\n\n See also:\n - :py:class:`RandAffineGrid` for the random affine parameters configurations.\n - :py:class:`Affine` for the affine transformation parameters configurations.\n \"\"\"\n MapTransform.__init__(self, keys, allow_missing_keys)\n RandomizableTransform.__init__(self, prob)\n self.rand_2d_elastic = Rand2DElastic(\n spacing=spacing,\n magnitude_range=magnitude_range,\n prob=1.0, # because probability controlled by this class\n rotate_range=rotate_range,\n shear_range=shear_range,\n translate_range=translate_range,\n scale_range=scale_range,\n spatial_size=spatial_size,\n as_tensor_output=as_tensor_output,\n device=device,\n )\n self.mode = ensure_tuple_rep(mode, len(self.keys))\n self.padding_mode = ensure_tuple_rep(padding_mode, len(self.keys))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rand2DElasticd.set_random_state_Rand2DElasticd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rand2DElasticd.set_random_state_Rand2DElasticd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 860, "end_line": 895, "span_ids": ["Rand2DElasticd.randomize", "Rand2DElasticd.__call__", "Rand2DElasticd.set_random_state"], "tokens": 397}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Rand2DElasticd(RandomizableTransform, MapTransform):\n\n def set_random_state(\n self, seed: Optional[int] = None, state: Optional[np.random.RandomState] = None\n ) -> \"Rand2DElasticd\":\n self.rand_2d_elastic.set_random_state(seed, state)\n super().set_random_state(seed, state)\n return self\n\n def randomize(self, spatial_size: Sequence[int]) -> None:\n super().randomize(None)\n self.rand_2d_elastic.randomize(spatial_size)\n\n def __call__(\n self, data: Mapping[Hashable, Union[np.ndarray, torch.Tensor]]\n ) -> Dict[Hashable, Union[np.ndarray, torch.Tensor]]:\n d = dict(data)\n\n sp_size = fall_back_tuple(self.rand_2d_elastic.spatial_size, data[self.keys[0]].shape[1:])\n self.randomize(spatial_size=sp_size)\n\n if self._do_transform:\n grid = self.rand_2d_elastic.deform_grid(spatial_size=sp_size)\n grid = self.rand_2d_elastic.rand_affine_grid(grid=grid)\n grid = torch.nn.functional.interpolate( # type: ignore\n recompute_scale_factor=True,\n input=grid.unsqueeze(0),\n 
scale_factor=ensure_tuple_rep(self.rand_2d_elastic.deform_grid.spacing, 2),\n mode=InterpolateMode.BICUBIC.value,\n align_corners=False,\n )\n grid = CenterSpatialCrop(roi_size=sp_size)(grid[0])\n else:\n grid = create_grid(spatial_size=sp_size)\n\n for key, mode, padding_mode in self.key_iterator(d, self.mode, self.padding_mode):\n d[key] = self.rand_2d_elastic.resampler(d[key], grid, mode=mode, padding_mode=padding_mode)\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rand3DElasticd_Rand3DElasticd.__init__.self.padding_mode.ensure_tuple_rep_padding_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rand3DElasticd_Rand3DElasticd.__init__.self.padding_mode.ensure_tuple_rep_padding_", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 898, "end_line": 978, "span_ids": ["Rand3DElasticd", "Rand3DElasticd.__init__"], "tokens": 1136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Rand3DElasticd(RandomizableTransform, MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.Rand3DElastic`.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n sigma_range: Tuple[float, float],\n magnitude_range: Tuple[float, float],\n spatial_size: Optional[Union[Tuple[int, int, int], int]] = None,\n prob: float = 0.1,\n rotate_range: Optional[Union[Sequence[Union[Tuple[float, float], float]], float]] = None,\n shear_range: Optional[Union[Sequence[Union[Tuple[float, float], float]], float]] = None,\n translate_range: Optional[Union[Sequence[Union[Tuple[float, float], float]], float]] = None,\n scale_range: Optional[Union[Sequence[Union[Tuple[float, float], float]], float]] = None,\n mode: GridSampleModeSequence = GridSampleMode.BILINEAR,\n padding_mode: GridSamplePadModeSequence = GridSamplePadMode.REFLECTION,\n as_tensor_output: bool = False,\n device: Optional[torch.device] = None,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n sigma_range: a Gaussian kernel with standard deviation sampled from\n ``uniform[sigma_range[0], sigma_range[1])`` will be used to smooth the random offset grid.\n magnitude_range: the random offsets on the grid will be generated from\n ``uniform[magnitude[0], magnitude[1])``.\n spatial_size: specifying output image spatial size [h, w, d].\n if `spatial_size` and `self.spatial_size` are not defined, or smaller than 1,\n the transform will use the spatial size of `img`.\n if the components of the `spatial_size` are non-positive values, the transform will use the\n corresponding components of img size. 
For example, `spatial_size=(32, 32, -1)` will be adapted\n to `(32, 32, 64)` if the third spatial dimension size of img is `64`.\n prob: probability of returning a randomized affine grid.\n defaults to 0.1, with 10% chance returns a randomized grid,\n otherwise returns a ``spatial_size`` centered area extracted from the input image.\n rotate_range: angle range in radians. If element `i` is iterable, then\n `uniform[-rotate_range[i][0], rotate_range[i][1])` will be used to generate the rotation parameter\n for the ith dimension. If not, `uniform[-rotate_range[i], rotate_range[i])` will be used. This can\n be altered on a per-dimension basis. E.g., `((0,3), 1, ...)`: for dim0, rotation will be in range\n `[0, 3]`, and for dim1 `[-1, 1]` will be used. Setting a single value will use `[-x, x]` for dim0\n and nothing for the remaining dimensions.\n shear_range: shear_range with format matching `rotate_range`.\n translate_range: translate_range with format matching `rotate_range`.\n scale_range: scaling_range with format matching `rotate_range`. A value of 1.0 is added to the result.\n This allows 0 to correspond to no change (i.e., a scaling of 1).\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``\"bilinear\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. Defaults to ``\"reflection\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n as_tensor_output: the computation is implemented using pytorch tensors, this option specifies\n whether to convert it back to numpy arrays.\n device: device on which the tensor will be allocated.\n allow_missing_keys: don't raise exception if key is missing.\n\n See also:\n - :py:class:`RandAffineGrid` for the random affine parameters configurations.\n - :py:class:`Affine` for the affine transformation parameters configurations.\n \"\"\"\n MapTransform.__init__(self, keys, allow_missing_keys)\n RandomizableTransform.__init__(self, prob)\n self.rand_3d_elastic = Rand3DElastic(\n sigma_range=sigma_range,\n magnitude_range=magnitude_range,\n prob=1.0, # because probability controlled by this class\n rotate_range=rotate_range,\n shear_range=shear_range,\n translate_range=translate_range,\n scale_range=scale_range,\n spatial_size=spatial_size,\n as_tensor_output=as_tensor_output,\n device=device,\n )\n self.mode = ensure_tuple_rep(mode, len(self.keys))\n self.padding_mode = ensure_tuple_rep(padding_mode, len(self.keys))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rand3DElasticd.set_random_state_Rand3DElasticd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Rand3DElasticd.set_random_state_Rand3DElasticd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", 
"category": "implementation", "start_line": 980, "end_line": 1009, "span_ids": ["Rand3DElasticd.__call__", "Rand3DElasticd.set_random_state", "Rand3DElasticd.randomize"], "tokens": 381}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Rand3DElasticd(RandomizableTransform, MapTransform):\n\n def set_random_state(\n self, seed: Optional[int] = None, state: Optional[np.random.RandomState] = None\n ) -> \"Rand3DElasticd\":\n self.rand_3d_elastic.set_random_state(seed, state)\n super().set_random_state(seed, state)\n return self\n\n def randomize(self, grid_size: Sequence[int]) -> None:\n super().randomize(None)\n self.rand_3d_elastic.randomize(grid_size)\n\n def __call__(\n self, data: Mapping[Hashable, Union[np.ndarray, torch.Tensor]]\n ) -> Dict[Hashable, Union[np.ndarray, torch.Tensor]]:\n d = dict(data)\n sp_size = fall_back_tuple(self.rand_3d_elastic.spatial_size, data[self.keys[0]].shape[1:])\n\n self.randomize(grid_size=sp_size)\n grid = create_grid(spatial_size=sp_size)\n if self._do_transform:\n device = self.rand_3d_elastic.device\n grid = torch.tensor(grid).to(device)\n gaussian = GaussianFilter(spatial_dims=3, sigma=self.rand_3d_elastic.sigma, truncated=3.0).to(device)\n offset = torch.tensor(self.rand_3d_elastic.rand_offset, device=device).unsqueeze(0)\n grid[:3] += gaussian(offset)[0] * self.rand_3d_elastic.magnitude\n grid = self.rand_3d_elastic.rand_affine_grid(grid=grid)\n\n for key, mode, padding_mode in self.key_iterator(d, self.mode, self.padding_mode):\n d[key] = self.rand_3d_elastic.resampler(d[key], grid, mode=mode, padding_mode=padding_mode)\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Flipd_Flipd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Flipd_Flipd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1012, "end_line": 1039, "span_ids": ["Flipd.__init__", "Flipd", "Flipd.__call__"], "tokens": 232}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Flipd(MapTransform, InvertibleTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.Flip`.\n\n See `numpy.flip` for additional details.\n https://docs.scipy.org/doc/numpy/reference/generated/numpy.flip.html\n\n Args:\n keys: Keys to pick data for transformation.\n spatial_axis: Spatial axes along which to flip over. 
Default is None.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n spatial_axis: Optional[Union[Sequence[int], int]] = None,\n allow_missing_keys: bool = False,\n ) -> None:\n super().__init__(keys, allow_missing_keys)\n self.flipper = Flip(spatial_axis=spatial_axis)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n self.push_transform(d, key)\n d[key] = self.flipper(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandFlipd_RandFlipd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandFlipd_RandFlipd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1056, "end_line": 1090, "span_ids": ["RandFlipd", "RandFlipd.__init__", "RandFlipd.__call__"], "tokens": 289}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandFlipd(RandomizableTransform, MapTransform, InvertibleTransform):\n \"\"\"\n Dictionary-based version :py:class:`monai.transforms.RandFlip`.\n\n See `numpy.flip` for additional details.\n https://docs.scipy.org/doc/numpy/reference/generated/numpy.flip.html\n\n Args:\n keys: Keys to pick data for transformation.\n prob: Probability of flipping.\n spatial_axis: Spatial axes along which to flip over. 
Default is None.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n prob: float = 0.1,\n spatial_axis: Optional[Union[Sequence[int], int]] = None,\n allow_missing_keys: bool = False,\n ) -> None:\n MapTransform.__init__(self, keys, allow_missing_keys)\n RandomizableTransform.__init__(self, prob)\n self.spatial_axis = spatial_axis\n\n self.flipper = Flip(spatial_axis=spatial_axis)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n self.randomize(None)\n d = dict(data)\n for key in self.key_iterator(d):\n if self._do_transform:\n d[key] = self.flipper(d[key])\n self.push_transform(d, key)\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotated_RandRotated._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotated_RandRotated._", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1263, "end_line": 1296, "span_ids": ["RandRotated"], "tokens": 517}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandRotated(RandomizableTransform, MapTransform, InvertibleTransform):\n \"\"\"\n Dictionary-based version :py:class:`monai.transforms.RandRotate`\n Randomly rotates the input arrays.\n\n Args:\n keys: Keys to pick data for transformation.\n range_x: Range of rotation angle in radians in the plane defined by the first and second axes.\n If single number, angle is uniformly sampled from (-range_x, range_x).\n range_y: Range of rotation angle in radians in the plane defined by the first and third axes.\n If single number, angle is uniformly sampled from (-range_y, range_y).\n range_z: Range of rotation angle in radians in the plane defined by the second and third axes.\n If single number, angle is uniformly sampled from (-range_z, range_z).\n prob: Probability of rotation.\n keep_size: If it is False, the output shape is adapted so that the\n input array is contained completely in the output.\n If it is True, the output shape is the same as the input. Default is True.\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``\"bilinear\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. 
Defaults to ``\"border\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n align_corners: Defaults to False.\n See also: https://pytorch.org/docs/stable/nn.functional.html#interpolate\n It also can be a sequence of bool, each element corresponds to a key in ``keys``.\n dtype: data type for resampling computation. Defaults to ``np.float64`` for best precision.\n If None, use the data type of input data. To be compatible with other modules,\n the output data type is always ``np.float32``.\n It also can be a sequence of dtype or None, each element corresponds to a key in ``keys``.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotated.__init___RandRotated.randomize.self.z.self_R_uniform_low_self_r": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotated.__init___RandRotated.randomize.self.z.self_R_uniform_low_self_r", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1298, "end_line": 1338, "span_ids": ["RandRotated.randomize", "RandRotated.__init__"], "tokens": 514}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandRotated(RandomizableTransform, MapTransform, InvertibleTransform):\n\n def __init__(\n self,\n keys: KeysCollection,\n range_x: Union[Tuple[float, float], float] = 0.0,\n range_y: Union[Tuple[float, float], float] = 0.0,\n range_z: Union[Tuple[float, float], float] = 0.0,\n prob: float = 0.1,\n keep_size: bool = True,\n mode: GridSampleModeSequence = GridSampleMode.BILINEAR,\n padding_mode: GridSamplePadModeSequence = GridSamplePadMode.BORDER,\n align_corners: Union[Sequence[bool], bool] = False,\n dtype: Union[Sequence[DtypeLike], DtypeLike] = np.float64,\n allow_missing_keys: bool = False,\n ) -> None:\n MapTransform.__init__(self, keys, allow_missing_keys)\n RandomizableTransform.__init__(self, prob)\n self.range_x = ensure_tuple(range_x)\n if len(self.range_x) == 1:\n self.range_x = tuple(sorted([-self.range_x[0], self.range_x[0]]))\n self.range_y = ensure_tuple(range_y)\n if len(self.range_y) == 1:\n self.range_y = tuple(sorted([-self.range_y[0], self.range_y[0]]))\n self.range_z = ensure_tuple(range_z)\n if len(self.range_z) == 1:\n self.range_z = tuple(sorted([-self.range_z[0], self.range_z[0]]))\n\n self.keep_size = keep_size\n self.mode = ensure_tuple_rep(mode, len(self.keys))\n self.padding_mode = ensure_tuple_rep(padding_mode, len(self.keys))\n self.align_corners = ensure_tuple_rep(align_corners, len(self.keys))\n self.dtype = ensure_tuple_rep(dtype, len(self.keys))\n\n self.x = 0.0\n self.y = 0.0\n self.z = 0.0\n\n def randomize(self, data: Optional[Any] = None) 
-> None:\n super().randomize(None)\n self.x = self.R.uniform(low=self.range_x[0], high=self.range_x[1])\n self.y = self.R.uniform(low=self.range_y[0], high=self.range_y[1])\n self.z = self.R.uniform(low=self.range_z[0], high=self.range_z[1])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotated.__call___RandRotated.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_RandRotated.__call___RandRotated.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1340, "end_line": 1374, "span_ids": ["RandRotated.__call__"], "tokens": 315}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandRotated(RandomizableTransform, MapTransform, InvertibleTransform):\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n self.randomize()\n d = dict(data)\n angle: Union[Sequence[float], float] = self.x if d[self.keys[0]].ndim == 3 else (self.x, self.y, self.z)\n rotator = Rotate(\n angle=angle,\n keep_size=self.keep_size,\n )\n for key, mode, padding_mode, align_corners, dtype in self.key_iterator(\n d, self.mode, self.padding_mode, self.align_corners, self.dtype\n ):\n orig_size = d[key].shape[1:]\n if self._do_transform:\n d[key] = rotator(\n d[key],\n mode=mode,\n padding_mode=padding_mode,\n align_corners=align_corners,\n dtype=dtype,\n )\n rot_mat = rotator.get_rotation_matrix()\n else:\n rot_mat = np.eye(d[key].ndim)\n self.push_transform(\n d,\n key,\n orig_size=orig_size,\n extra_info={\n \"rot_mat\": rot_mat,\n \"mode\": mode.value if isinstance(mode, Enum) else mode,\n \"padding_mode\": padding_mode.value if isinstance(padding_mode, Enum) else padding_mode,\n \"align_corners\": align_corners if align_corners is not None else \"none\",\n },\n )\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_SpacingD_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_SpacingD_", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1621, "end_line": 1637, "span_ids": ["impl:12"], "tokens": 194}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "SpacingD = SpacingDict = Spacingd\nOrientationD = OrientationDict = Orientationd\nRotate90D = Rotate90Dict = Rotate90d\nRandRotate90D = RandRotate90Dict = RandRotate90d\nResizeD = ResizeDict = Resized\nAffineD = AffineDict = Affined\nRandAffineD = RandAffineDict = RandAffined\nRand2DElasticD = Rand2DElasticDict = Rand2DElasticd\nRand3DElasticD = Rand3DElasticDict = Rand3DElasticd\nFlipD = FlipDict = Flipd\nRandFlipD = RandFlipDict = RandFlipd\nRandAxisFlipD = RandAxisFlipDict = RandAxisFlipd\nRotateD = RotateDict = Rotated\nRandRotateD = RandRotateDict = RandRotated\nZoomD = ZoomDict = Zoomd\nRandZoomD = RandZoomDict = RandZoomd", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/__init__.py__": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/__init__.py__", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 11, "end_line": 11, "span_ids": [], "tokens": 0}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_AsChannelFirst_AsChannelFirst.__call__.return.np_moveaxis_img_self_cha": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_AsChannelFirst_AsChannelFirst.__call__.return.np_moveaxis_img_self_cha", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 73, "end_line": 98, "span_ids": ["AsChannelFirst.__init__", "AsChannelFirst", "AsChannelFirst.__call__"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AsChannelFirst(Transform):\n \"\"\"\n Change the channel dimension of the image to the first dimension.\n\n Most of the image transformations in ``monai.transforms``\n assume the input image is in the channel-first format, which has the shape\n (num_channels, spatial_dim_1[, spatial_dim_2, ...]).\n\n This transform could be used to convert, for example, a channel-last image array in shape\n (spatial_dim_1[, spatial_dim_2, ...], num_channels) into the channel-first 
format,\n so that the multidimensional image array can be correctly interpreted by the other transforms.\n\n Args:\n channel_dim: which dimension of input image is the channel, default is the last dimension.\n \"\"\"\n\n def __init__(self, channel_dim: int = -1) -> None:\n if not (isinstance(channel_dim, int) and channel_dim >= -1):\n raise AssertionError(\"invalid channel dimension.\")\n self.channel_dim = channel_dim\n\n def __call__(self, img: np.ndarray) -> np.ndarray:\n \"\"\"\n Apply the transform to `img`.\n \"\"\"\n return np.moveaxis(img, self.channel_dim, 0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_AsChannelLast_AsChannelLast.__call__.return.np_moveaxis_img_self_cha": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_AsChannelLast_AsChannelLast.__call__.return.np_moveaxis_img_self_cha", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 101, "end_line": 125, "span_ids": ["AsChannelLast.__init__", "AsChannelLast.__call__", "AsChannelLast"], "tokens": 241}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AsChannelLast(Transform):\n \"\"\"\n Change the channel dimension of the image to the last dimension.\n\n Some of other 3rd party transforms assume the input image is in the channel-last format with shape\n (spatial_dim_1[, spatial_dim_2, ...], num_channels).\n\n This transform could be used to convert, for example, a channel-first image array in shape\n (num_channels, spatial_dim_1[, spatial_dim_2, ...]) into the channel-last format,\n so that MONAI transforms can construct a chain with other 3rd party transforms together.\n\n Args:\n channel_dim: which dimension of input image is the channel, default is the first dimension.\n \"\"\"\n\n def __init__(self, channel_dim: int = 0) -> None:\n if not (isinstance(channel_dim, int) and channel_dim >= -1):\n raise AssertionError(\"invalid channel dimension.\")\n self.channel_dim = channel_dim\n\n def __call__(self, img: np.ndarray) -> np.ndarray:\n \"\"\"\n Apply the transform to `img`.\n \"\"\"\n return np.moveaxis(img, self.channel_dim, -1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_AddChannel_AddChannel.__call__.return.img_None_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_AddChannel_AddChannel.__call__.return.img_None_", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/array.py", "file_name": "array.py", "file_type": "text/x-python", 
"category": "implementation", "start_line": 128, "end_line": 146, "span_ids": ["AddChannel.__call__", "AddChannel"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AddChannel(Transform):\n \"\"\"\n Adds a 1-length channel dimension to the input image.\n\n Most of the image transformations in ``monai.transforms``\n assumes the input image is in the channel-first format, which has the shape\n (num_channels, spatial_dim_1[, spatial_dim_2, ...]).\n\n This transform could be used, for example, to convert a (spatial_dim_1[, spatial_dim_2, ...])\n spatial image into the channel-first format so that the\n multidimensional image array can be correctly interpreted by the other\n transforms.\n \"\"\"\n\n def __call__(self, img: NdarrayTensor):\n \"\"\"\n Apply the transform to `img`.\n \"\"\"\n return img[None]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_RepeatChannel_RepeatChannel.__call__.return.np_repeat_img_self_repea": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_RepeatChannel_RepeatChannel.__call__.return.np_repeat_img_self_repea", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 174, "end_line": 193, "span_ids": ["RepeatChannel.__call__", "RepeatChannel.__init__", "RepeatChannel"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RepeatChannel(Transform):\n \"\"\"\n Repeat channel data to construct expected input shape for models.\n The `repeats` count includes the origin data, for example:\n ``RepeatChannel(repeats=2)([[1, 2], [3, 4]])`` generates: ``[[1, 2], [1, 2], [3, 4], [3, 4]]``\n\n Args:\n repeats: the number of repetitions for each element.\n \"\"\"\n\n def __init__(self, repeats: int) -> None:\n if repeats <= 0:\n raise AssertionError(\"repeats count must be greater than 0.\")\n self.repeats = repeats\n\n def __call__(self, img: np.ndarray) -> np.ndarray:\n \"\"\"\n Apply the transform to `img`, assuming `img` is a \"channel-first\" array.\n \"\"\"\n return np.repeat(img, self.repeats, 0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_DataStats_DataStats.__init__.if_logger_handler_is_not_.self__logger_addHandler_l": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_DataStats_DataStats.__init__.if_logger_handler_is_not_.self__logger_addHandler_l", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 380, "end_line": 432, "span_ids": ["DataStats", "DataStats.__init__"], "tokens": 507}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DataStats(Transform):\n \"\"\"\n Utility transform to show the statistics of data for debug or analysis.\n It can be inserted into any place of a transform chain and check results of previous transforms.\n It support both `numpy.ndarray` and `torch.tensor` as input data,\n so it can be used in pre-processing and post-processing.\n \"\"\"\n\n def __init__(\n self,\n prefix: str = \"Data\",\n data_type: bool = True,\n data_shape: bool = True,\n value_range: bool = True,\n data_value: bool = False,\n additional_info: Optional[Callable] = None,\n logger_handler: Optional[logging.Handler] = None,\n ) -> None:\n \"\"\"\n Args:\n prefix: will be printed in format: \"{prefix} statistics\".\n data_type: whether to show the type of input data.\n data_shape: whether to show the shape of input data.\n value_range: whether to show the value range of input data.\n data_value: whether to show the raw value of input data.\n a typical example is to print some properties of Nifti image: affine, pixdim, etc.\n additional_info: user can define callable function to extract additional info from input data.\n logger_handler: add additional handler to output data: save to file, etc.\n add existing python logging handlers: https://docs.python.org/3/library/logging.handlers.html\n the handler should have a logging level of at least `INFO`.\n\n Raises:\n TypeError: When ``additional_info`` is not an ``Optional[Callable]``.\n\n \"\"\"\n if not isinstance(prefix, str):\n raise AssertionError(\"prefix must be a string.\")\n self.prefix = prefix\n self.data_type = data_type\n self.data_shape = data_shape\n self.value_range = value_range\n self.data_value = data_value\n if additional_info is not None and not callable(additional_info):\n raise TypeError(f\"additional_info must be None or callable but is {type(additional_info).__name__}.\")\n self.additional_info = additional_info\n self.output: Optional[str] = None\n self._logger = logging.getLogger(\"DataStats\")\n self._logger.setLevel(logging.INFO)\n console = logging.StreamHandler(sys.stdout) # always stdout\n console.setLevel(logging.INFO)\n self._logger.addHandler(console)\n if logger_handler is not None:\n self._logger.addHandler(logger_handler)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_DataStats.__call___DataStats.__call__.return.img": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_DataStats.__call___DataStats.__call__.return.img", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 434, "end_line": 469, "span_ids": ["DataStats.__call__"], "tokens": 363}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DataStats(Transform):\n\n def __call__(\n self,\n img: NdarrayTensor,\n prefix: Optional[str] = None,\n data_type: Optional[bool] = None,\n data_shape: Optional[bool] = None,\n value_range: Optional[bool] = None,\n data_value: Optional[bool] = None,\n additional_info: Optional[Callable] = None,\n ) -> NdarrayTensor:\n \"\"\"\n Apply the transform to `img`, optionally take arguments similar to the class constructor.\n \"\"\"\n lines = [f\"{prefix or self.prefix} statistics:\"]\n\n if self.data_type if data_type is None else data_type:\n lines.append(f\"Type: {type(img)}\")\n if self.data_shape if data_shape is None else data_shape:\n lines.append(f\"Shape: {img.shape}\")\n if self.value_range if value_range is None else value_range:\n if isinstance(img, np.ndarray):\n lines.append(f\"Value range: ({np.min(img)}, {np.max(img)})\")\n elif isinstance(img, torch.Tensor):\n lines.append(f\"Value range: ({torch.min(img)}, {torch.max(img)})\")\n else:\n lines.append(f\"Value range: (not a PyTorch or Numpy array, type: {type(img)})\")\n if self.data_value if data_value is None else data_value:\n lines.append(f\"Value: {img}\")\n additional_info = self.additional_info if additional_info is None else additional_info\n if additional_info is not None:\n lines.append(f\"Additional info: {additional_info(img)}\")\n separator = \"\\n\"\n self.output = f\"{separator.join(lines)}\"\n self._logger.info(self.output)\n\n return img", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_SimulateDelay_SimulateDelay.__call__.return.img": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_SimulateDelay_SimulateDelay.__call__.return.img", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 326, "end_line": 355, "span_ids": ["SimulateDelay.__call__", "SimulateDelay.__init__", "SimulateDelay"], "tokens": 256}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SimulateDelay(Transform):\n \"\"\"\n This is a pass through transform to be used for testing purposes. 
It allows\n adding fake behaviors that are useful for testing purposes to simulate\n how large datasets behave without needing to test on large data sets.\n\n For example, simulating slow NFS data transfers, or slow network transfers\n in testing by adding explicit timing delays. Testing of small test data\n can lead to incomplete understanding of real world issues, and may lead\n to sub-optimal design choices.\n \"\"\"\n\n def __init__(self, delay_time: float = 0.0) -> None:\n \"\"\"\n Args:\n delay_time: The minimum amount of time, in fractions of seconds,\n to accomplish this delay task.\n \"\"\"\n super().__init__()\n self.delay_time: float = delay_time\n\n def __call__(self, img: NdarrayTensor, delay_time: Optional[float] = None) -> NdarrayTensor:\n \"\"\"\n Args:\n img: data remain unchanged throughout this transform.\n delay_time: The minimum amount of time, in fractions of seconds,\n to accomplish this delay task.\n \"\"\"\n time.sleep(self.delay_time if delay_time is None else delay_time)\n return img", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_LabelToMask_LabelToMask.__init__.self.merge_channels.merge_channels": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_LabelToMask_LabelToMask.__init__.self.merge_channels.merge_channels", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 408, "end_line": 433, "span_ids": ["LabelToMask.__init__", "LabelToMask"], "tokens": 297}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LabelToMask(Transform):\n \"\"\"\n Convert labels to mask for other tasks. A typical usage is to convert segmentation labels\n to mask data to pre-process images and then feed the images into classification network.\n It can support single channel labels or One-Hot labels with specified `select_labels`.\n For example, users can select `label value = [2, 3]` to construct mask data, or select the\n second and the third channels of labels to construct mask data.\n The output mask data can be a multiple channels binary data or a single channel binary\n data that merges all the channels.\n\n Args:\n select_labels: labels to generate mask from. for 1 channel label, the `select_labels`\n is the expected label values, like: [1, 2, 3]. for One-Hot format label, the\n `select_labels` is the expected channel indices.\n merge_channels: whether to use `np.any()` to merge the result on channel dim. 
if yes,\n will return a single channel mask with binary data.\n\n \"\"\"\n\n def __init__( # pytype: disable=annotation-type-mismatch\n self,\n select_labels: Union[Sequence[int], int],\n merge_channels: bool = False,\n ) -> None: # pytype: disable=annotation-type-mismatch\n self.select_labels = ensure_tuple(select_labels)\n self.merge_channels = merge_channels", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_Identityd_Identityd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_Identityd_Identityd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 137, "end_line": 159, "span_ids": ["Identityd.__init__", "Identityd", "Identityd.__call__"], "tokens": 177}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Identityd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.Identity`.\n \"\"\"\n\n def __init__(self, keys: KeysCollection, allow_missing_keys: bool = False) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.identity = Identity()\n\n def __call__(\n self, data: Mapping[Hashable, Union[np.ndarray, torch.Tensor]]\n ) -> Dict[Hashable, Union[np.ndarray, torch.Tensor]]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.identity(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_AsChannelFirstd_AsChannelFirstd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_AsChannelFirstd_AsChannelFirstd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 162, "end_line": 182, "span_ids": ["AsChannelFirstd.__init__", "AsChannelFirstd.__call__", "AsChannelFirstd"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
AsChannelFirstd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.AsChannelFirst`.\n \"\"\"\n\n def __init__(self, keys: KeysCollection, channel_dim: int = -1, allow_missing_keys: bool = False) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n channel_dim: which dimension of input image is the channel, default is the last dimension.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.converter = AsChannelFirst(channel_dim=channel_dim)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.converter(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_AsChannelLastd_AsChannelLastd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_AsChannelLastd_AsChannelLastd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 185, "end_line": 205, "span_ids": ["AsChannelLastd.__call__", "AsChannelLastd.__init__", "AsChannelLastd"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AsChannelLastd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.AsChannelLast`.\n \"\"\"\n\n def __init__(self, keys: KeysCollection, channel_dim: int = 0, allow_missing_keys: bool = False) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n channel_dim: which dimension of input image is the channel, default is the first dimension.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.converter = AsChannelLast(channel_dim=channel_dim)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.converter(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_AddChanneld_AddChanneld.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_AddChanneld_AddChanneld.__call__.return.d", "embedding": null, "metadata": {"file_path": 
"monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 208, "end_line": 227, "span_ids": ["AddChanneld.__call__", "AddChanneld.__init__", "AddChanneld"], "tokens": 174}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AddChanneld(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.AddChannel`.\n \"\"\"\n\n def __init__(self, keys: KeysCollection, allow_missing_keys: bool = False) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.adder = AddChannel()\n\n def __call__(self, data: Mapping[Hashable, NdarrayTensor]) -> Dict[Hashable, NdarrayTensor]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.adder(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_RepeatChanneld_RepeatChanneld.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_RepeatChanneld_RepeatChanneld.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 256, "end_line": 276, "span_ids": ["RepeatChanneld.__init__", "RepeatChanneld", "RepeatChanneld.__call__"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RepeatChanneld(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.RepeatChannel`.\n \"\"\"\n\n def __init__(self, keys: KeysCollection, repeats: int, allow_missing_keys: bool = False) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n repeats: the number of repetitions for each element.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.repeater = RepeatChannel(repeats)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.repeater(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_CastToTyped_CastToTyped.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_CastToTyped_CastToTyped.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 352, "end_line": 384, "span_ids": ["CastToTyped", "CastToTyped.__init__", "CastToTyped.__call__"], "tokens": 281}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CastToTyped(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.CastToType`.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n dtype: Union[Sequence[Union[DtypeLike, torch.dtype]], DtypeLike, torch.dtype] = np.float32,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n dtype: convert image to this data type, default is `np.float32`.\n it also can be a sequence of dtypes or torch.dtype,\n each element corresponds to a key in ``keys``.\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n MapTransform.__init__(self, keys, allow_missing_keys)\n self.dtype = ensure_tuple_rep(dtype, len(self.keys))\n self.converter = CastToType()\n\n def __call__(\n self, data: Mapping[Hashable, Union[np.ndarray, torch.Tensor]]\n ) -> Dict[Hashable, Union[np.ndarray, torch.Tensor]]:\n d = dict(data)\n for key, dtype in self.key_iterator(d, self.dtype):\n d[key] = self.converter(d[key], dtype=dtype)\n\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_ToNumpyd_ToNumpyd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_ToNumpyd_ToNumpyd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 421, "end_line": 440, "span_ids": ["ToNumpyd.__init__", "ToNumpyd", "ToNumpyd.__call__"], "tokens": 170}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ToNumpyd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.ToNumpy`.\n \"\"\"\n\n def __init__(self, keys: KeysCollection, allow_missing_keys: bool = False) -> None:\n \"\"\"\n Args:\n keys: keys of the 
corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.converter = ToNumpy()\n\n def __call__(self, data: Mapping[Hashable, Any]) -> Dict[Hashable, Any]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.converter(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_SqueezeDimd_SqueezeDimd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_SqueezeDimd_SqueezeDimd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 486, "end_line": 506, "span_ids": ["SqueezeDimd.__init__", "SqueezeDimd", "SqueezeDimd.__call__"], "tokens": 201}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SqueezeDimd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.SqueezeDim`.\n \"\"\"\n\n def __init__(self, keys: KeysCollection, dim: int = 0, allow_missing_keys: bool = False) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n dim: dimension to be squeezed. 
Default: 0 (the first dimension)\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.converter = SqueezeDim(dim=dim)\n\n def __call__(self, data: Mapping[Hashable, NdarrayTensor]) -> Dict[Hashable, NdarrayTensor]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.converter(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_SimulateDelayd_SimulateDelayd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_SimulateDelayd_SimulateDelayd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 577, "end_line": 602, "span_ids": ["SimulateDelayd.__init__", "SimulateDelayd.__call__", "SimulateDelayd"], "tokens": 266}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SimulateDelayd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.SimulateDelay`.\n \"\"\"\n\n def __init__(\n self, keys: KeysCollection, delay_time: Union[Sequence[float], float] = 0.0, allow_missing_keys: bool = False\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n delay_time: The minimum amount of time, in fractions of seconds, to accomplish this identity task.\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.delay_time = ensure_tuple_rep(delay_time, len(self.keys))\n self.delayer = SimulateDelay()\n\n def __call__(self, data: Mapping[Hashable, NdarrayTensor]) -> Dict[Hashable, NdarrayTensor]:\n d = dict(data)\n for key, delay_time in self.key_iterator(d, self.delay_time):\n d[key] = self.delayer(d[key], delay_time=delay_time)\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_ConcatItemsd_ConcatItemsd.__init__.self.dim.dim": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_ConcatItemsd_ConcatItemsd.__init__.self.dim.dim", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 661, "end_line": 679, "span_ids": ["ConcatItemsd", "ConcatItemsd.__init__"], "tokens": 180}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ConcatItemsd(MapTransform):\n \"\"\"\n Concatenate specified items from data dictionary together on the first dim to construct a big array.\n Expect all the items are numpy array or PyTorch Tensor.\n\n \"\"\"\n\n def __init__(self, keys: KeysCollection, name: str, dim: int = 0, allow_missing_keys: bool = False) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be concatenated together.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n name: the name corresponding to the key to store the concatenated data.\n dim: on which dimension to concatenate the items, default is 0.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.name = name\n self.dim = dim", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_ConcatItemsd.__call___ConcatItemsd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_ConcatItemsd.__call___ConcatItemsd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 681, "end_line": 703, "span_ids": ["ConcatItemsd.__call__"], "tokens": 201}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ConcatItemsd(MapTransform):\n\n def __call__(self, data):\n \"\"\"\n Raises:\n TypeError: When items in ``data`` differ in type.\n TypeError: When the item type is not in ``Union[numpy.ndarray, torch.Tensor]``.\n\n \"\"\"\n d = dict(data)\n output = []\n data_type = None\n for key in self.key_iterator(d):\n if data_type is None:\n data_type = type(d[key])\n elif not isinstance(d[key], data_type):\n raise TypeError(\"All items in data must have the same type.\")\n output.append(d[key])\n if data_type == np.ndarray:\n d[self.name] = np.concatenate(output, axis=self.dim)\n elif data_type == torch.Tensor:\n d[self.name] = torch.cat(output, dim=self.dim)\n else:\n raise TypeError(f\"Unsupported data type: {data_type}, available options are (numpy.ndarray, torch.Tensor).\")\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_Lambdad_Lambdad.__call__.return.d": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_Lambdad_Lambdad.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 706, "end_line": 748, "span_ids": ["Lambdad", "Lambdad.__init__", "Lambdad.__call__"], "tokens": 404}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Lambdad(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.Lambda`.\n\n For example:\n\n .. code-block:: python\n :emphasize-lines: 2\n\n input_data={'image': np.zeros((10, 2, 2)), 'label': np.ones((10, 2, 2))}\n lambd = Lambdad(keys='label', func=lambda x: x[:4, :, :])\n print(lambd(input_data)['label'].shape)\n (4, 2, 2)\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n func: Lambda/function to be applied. It also can be a sequence of Callable,\n each element corresponds to a key in ``keys``.\n overwrite: whether to overwrite the original data in the input dictionary with lamdbda function output.\n default to True. it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n allow_missing_keys: don't raise exception if key is missing.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n func: Union[Sequence[Callable], Callable],\n overwrite: Union[Sequence[bool], bool] = True,\n allow_missing_keys: bool = False,\n ) -> None:\n super().__init__(keys, allow_missing_keys)\n self.func = ensure_tuple_rep(func, len(self.keys))\n self.overwrite = ensure_tuple_rep(overwrite, len(self.keys))\n self._lambd = Lambda()\n\n def __call__(self, data):\n d = dict(data)\n for key, func, overwrite in self.key_iterator(d, self.func, self.overwrite):\n ret = self._lambd(d[key], func=func)\n if overwrite:\n d[key] = ret\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_LabelToMaskd_LabelToMaskd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_LabelToMaskd_LabelToMaskd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 772, "end_line": 803, "span_ids": ["LabelToMaskd", "LabelToMaskd.__init__", "LabelToMaskd.__call__"], "tokens": 319}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LabelToMaskd(MapTransform):\n \"\"\"\n 
Dictionary-based wrapper of :py:class:`monai.transforms.LabelToMask`.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n select_labels: labels to generate mask from. for 1 channel label, the `select_labels`\n is the expected label values, like: [1, 2, 3]. for One-Hot format label, the\n `select_labels` is the expected channel indices.\n merge_channels: whether to use `np.any()` to merge the result on channel dim.\n if yes, will return a single channel mask with binary data.\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n\n def __init__( # pytype: disable=annotation-type-mismatch\n self,\n keys: KeysCollection,\n select_labels: Union[Sequence[int], int],\n merge_channels: bool = False,\n allow_missing_keys: bool = False,\n ) -> None: # pytype: disable=annotation-type-mismatch\n super().__init__(keys, allow_missing_keys)\n self.converter = LabelToMask(select_labels=select_labels, merge_channels=merge_channels)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.converter(d[key])\n\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_IdentityD_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_IdentityD_", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1046, "end_line": 1075, "span_ids": ["impl:3"], "tokens": 402}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "IdentityD = IdentityDict = Identityd\nAsChannelFirstD = AsChannelFirstDict = AsChannelFirstd\nAsChannelLastD = AsChannelLastDict = AsChannelLastd\nAddChannelD = AddChannelDict = AddChanneld\nEnsureChannelFirstD = EnsureChannelFirstDict = EnsureChannelFirstd\nRemoveRepeatedChannelD = RemoveRepeatedChannelDict = RemoveRepeatedChanneld\nRepeatChannelD = RepeatChannelDict = RepeatChanneld\nSplitChannelD = SplitChannelDict = SplitChanneld\nCastToTypeD = CastToTypeDict = CastToTyped\nToTensorD = ToTensorDict = ToTensord\nToNumpyD = ToNumpyDict = ToNumpyd\nToPILD = ToPILDict = ToPILd\nDeleteItemsD = DeleteItemsDict = DeleteItemsd\nSqueezeDimD = SqueezeDimDict = SqueezeDimd\nDataStatsD = DataStatsDict = DataStatsd\nSimulateDelayD = SimulateDelayDict = SimulateDelayd\nCopyItemsD = CopyItemsDict = CopyItemsd\nConcatItemsD = ConcatItemsDict = ConcatItemsd\nLambdaD = LambdaDict = Lambdad\nLabelToMaskD = LabelToMaskDict = LabelToMaskd\nFgBgToIndicesD = FgBgToIndicesDict = FgBgToIndicesd\nConvertToMultiChannelBasedOnBratsClassesD = (\n ConvertToMultiChannelBasedOnBratsClassesDict\n) = ConvertToMultiChannelBasedOnBratsClassesd\nAddExtremePointsChannelD = AddExtremePointsChannelDict = AddExtremePointsChanneld\nTorchVisionD = 
TorchVisionDict = TorchVisiond\nRandTorchVisionD = RandTorchVisionDict = RandTorchVisiond\nRandLambdaD = RandLambdaDict = RandLambdad\nMapLabelValueD = MapLabelValueDict = MapLabelValued", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_rescale_array_rescale_array._rescale_by_minv_and_max": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_rescale_array_rescale_array._rescale_by_minv_and_max", "embedding": null, "metadata": {"file_path": "monai/transforms/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 110, "end_line": 124, "span_ids": ["rescale_array"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def rescale_array(arr: np.ndarray, minv: float = 0.0, maxv: float = 1.0, dtype: DtypeLike = np.float32):\n \"\"\"\n Rescale the values of numpy array `arr` to be from `minv` to `maxv`.\n \"\"\"\n if dtype is not None:\n arr = arr.astype(dtype)\n\n mina = np.min(arr)\n maxa = np.max(arr)\n\n if mina == maxa:\n return arr * minv\n\n norm = (arr - mina) / (maxa - mina) # normalize the array first\n return (norm * (maxv - minv)) + minv # rescale by minv and maxv, which is the normalized array by default", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_copypaste_arrays_copypaste_arrays.return.tuple_srcslices_tuple_d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_copypaste_arrays_copypaste_arrays.return.tuple_srcslices_tuple_d", "embedding": null, "metadata": {"file_path": "monai/transforms/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 148, "end_line": 205, "span_ids": ["copypaste_arrays"], "tokens": 662}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def copypaste_arrays(\n src_shape,\n dest_shape,\n srccenter: Sequence[int],\n destcenter: Sequence[int],\n dims: Sequence[Optional[int]],\n) -> Tuple[Tuple[slice, ...], Tuple[slice, ...]]:\n \"\"\"\n Calculate the slices to copy a sliced area of array in `src_shape` into array in `dest_shape`.\n\n The area has dimensions `dims` (use 0 or None to copy everything in that dimension),\n the source area is centered at `srccenter` index in `src` and copied into area centered at `destcenter` in `dest`.\n The dimensions of the copied area 
will be clipped to fit within the\n source and destination arrays so a smaller area may be copied than expected. Return value is the tuples of slice\n objects indexing the copied area in `src`, and those indexing the copy area in `dest`.\n\n Example\n\n .. code-block:: python\n\n src_shape = (6,6)\n src = np.random.randint(0,10,src_shape)\n dest = np.zeros_like(src)\n srcslices, destslices = copypaste_arrays(src_shape, dest.shape, (3, 2),(2, 1),(3, 4))\n dest[destslices] = src[srcslices]\n print(src)\n print(dest)\n\n >>> [[9 5 6 6 9 6]\n [4 3 5 6 1 2]\n [0 7 3 2 4 1]\n [3 0 0 1 5 1]\n [9 4 7 1 8 2]\n [6 6 5 8 6 7]]\n [[0 0 0 0 0 0]\n [7 3 2 4 0 0]\n [0 0 1 5 0 0]\n [4 7 1 8 0 0]\n [0 0 0 0 0 0]\n [0 0 0 0 0 0]]\n\n \"\"\"\n s_ndim = len(src_shape)\n d_ndim = len(dest_shape)\n srcslices = [slice(None)] * s_ndim\n destslices = [slice(None)] * d_ndim\n\n for i, ss, ds, sc, dc, dim in zip(range(s_ndim), src_shape, dest_shape, srccenter, destcenter, dims):\n if dim:\n # dimension before midpoint, clip to size fitting in both arrays\n d1 = np.clip(dim // 2, 0, min(sc, dc))\n # dimension after midpoint, clip to size fitting in both arrays\n d2 = np.clip(dim // 2 + 1, 0, min(ss - sc, ds - dc))\n\n srcslices[i] = slice(sc - d1, sc + d2)\n destslices[i] = slice(dc - d1, dc + d2)\n\n return tuple(srcslices), tuple(destslices)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_grid_create_grid.return.np_concatenate_coords_n": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_grid_create_grid.return.np_concatenate_coords_n", "embedding": null, "metadata": {"file_path": "monai/transforms/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 381, "end_line": 401, "span_ids": ["create_grid"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def create_grid(\n spatial_size: Sequence[int],\n spacing: Optional[Sequence[float]] = None,\n homogeneous: bool = True,\n dtype: DtypeLike = float,\n):\n \"\"\"\n compute a `spatial_size` mesh.\n\n Args:\n spatial_size: spatial size of the grid.\n spacing: same len as ``spatial_size``, defaults to 1.0 (dense grid).\n homogeneous: whether to make homogeneous coordinates.\n dtype: output grid data type.\n \"\"\"\n spacing = spacing or tuple(1.0 for _ in spatial_size)\n ranges = [np.linspace(-(d - 1.0) / 2.0 * s, (d - 1.0) / 2.0 * s, int(d)) for d, s in zip(spatial_size, spacing)]\n coords = np.asarray(np.meshgrid(*ranges, indexing=\"ij\"), dtype=dtype)\n if not homogeneous:\n return coords\n return np.concatenate([coords, np.ones_like(coords[:1])])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_control_grid_create_control_grid.return.create_grid_grid_shape_s": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_control_grid_create_control_grid.return.create_grid_grid_shape_s", "embedding": null, "metadata": {"file_path": "monai/transforms/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 404, "end_line": 417, "span_ids": ["create_control_grid"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def create_control_grid(\n spatial_shape: Sequence[int], spacing: Sequence[float], homogeneous: bool = True, dtype: DtypeLike = float\n):\n \"\"\"\n control grid with two additional point in each direction\n \"\"\"\n grid_shape = []\n for d, s in zip(spatial_shape, spacing):\n d = int(d)\n if d % 2 == 0:\n grid_shape.append(np.ceil((d - 1.0) / (2.0 * s) + 0.5) * 2.0 + 2.0)\n else:\n grid_shape.append(np.ceil((d - 1.0) / (2.0 * s)) * 2.0 + 3.0)\n return create_grid(grid_shape, spacing, homogeneous, dtype)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_shear_create_shear.raise_NotImplementedError": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_shear_create_shear.raise_NotImplementedError", "embedding": null, "metadata": {"file_path": "monai/transforms/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 379, "end_line": 404, "span_ids": ["create_shear"], "tokens": 301}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def create_shear(spatial_dims: int, coefs: Union[Sequence[float], float]) -> np.ndarray:\n \"\"\"\n create a shearing matrix\n\n Args:\n spatial_dims: spatial rank\n coefs: shearing factors, defaults to 0.\n\n Raises:\n NotImplementedError: When ``spatial_dims`` is not one of [2, 3].\n\n \"\"\"\n if spatial_dims == 2:\n coefs = ensure_tuple_size(coefs, dim=2, pad_val=0.0)\n return np.array([[1, coefs[0], 0.0], [coefs[1], 1.0, 0.0], [0.0, 0.0, 1.0]])\n if spatial_dims == 3:\n coefs = ensure_tuple_size(coefs, dim=6, pad_val=0.0)\n return np.array(\n [\n [1.0, coefs[0], coefs[1], 0.0],\n [coefs[2], 1.0, coefs[3], 0.0],\n [coefs[4], coefs[5], 1.0, 0.0],\n [0.0, 0.0, 0.0, 1.0],\n ]\n )\n raise NotImplementedError(\"Currently only spatial_dims in [2, 3] are supported.\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_generate_spatial_bounding_box_generate_spatial_bounding_box.return.box_start_box_end": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_generate_spatial_bounding_box_generate_spatial_bounding_box.return.box_start_box_end", "embedding": null, "metadata": {"file_path": "monai/transforms/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 525, "end_line": 570, "span_ids": ["generate_spatial_bounding_box"], "tokens": 531}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def generate_spatial_bounding_box(\n img: np.ndarray,\n select_fn: Callable = lambda x: x > 0,\n channel_indices: Optional[IndexSelection] = None,\n margin: Union[Sequence[int], int] = 0,\n) -> Tuple[List[int], List[int]]:\n \"\"\"\n generate the spatial bounding box of foreground in the image with start-end positions.\n Users can define arbitrary function to select expected foreground from the whole image or specified channels.\n And it can also add margin to every dim of the bounding box.\n The output format of the coordinates is:\n\n [1st_spatial_dim_start, 2nd_spatial_dim_start, ..., Nth_spatial_dim_start],\n [1st_spatial_dim_end, 2nd_spatial_dim_end, ..., Nth_spatial_dim_end]\n\n The bounding boxes edges are aligned with the input image edges.\n This function returns [-1, -1, ...], [-1, -1, ...] if there's no positive intensity.\n\n Args:\n img: source image to generate bounding box from.\n select_fn: function to select expected foreground, default is to select values > 0.\n channel_indices: if defined, select foreground only on the specified channels\n of image. 
if None, select foreground on the whole image.\n margin: add margin value to spatial dims of the bounding box, if only 1 value provided, use it for all dims.\n \"\"\"\n data = img[list(ensure_tuple(channel_indices))] if channel_indices is not None else img\n data = np.any(select_fn(data), axis=0)\n ndim = len(data.shape)\n margin = ensure_tuple_rep(margin, ndim)\n for m in margin:\n if m < 0:\n raise ValueError(\"margin value should not be negative number.\")\n\n box_start = [0] * ndim\n box_end = [0] * ndim\n\n for di, ax in enumerate(itertools.combinations(reversed(range(ndim)), ndim - 1)):\n dt = data.any(axis=ax)\n if not np.any(dt):\n return [-1] * ndim, [-1] * ndim\n\n min_d = max(np.argmax(dt) - margin[di], 0)\n max_d = max(data.shape[di] - max(np.argmax(dt[::-1]) - margin[di], 0), min_d + 1)\n box_start[di], box_end[di] = min_d, max_d\n\n return box_start, box_end", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/aliases.py_resolve_name_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/aliases.py_resolve_name_", "embedding": null, "metadata": {"file_path": "monai/utils/aliases.py", "file_name": "aliases.py", "file_type": "text/x-python", "category": "implementation", "start_line": 46, "end_line": 103, "span_ids": ["resolve_name"], "tokens": 609}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def resolve_name(name):\n \"\"\"\n Search for the declaration (function or class) with the given name. This will first search the list of aliases to\n see if it was declared with this aliased name, then search treating `name` as a fully qualified name, then search\n the loaded modules for one having a declaration with the given name. 
If no declaration is found, raise ValueError.\n\n Raises:\n ValueError: When the module is not found.\n ValueError: When the module does not have the specified member.\n ValueError: When multiple modules with the declaration name are found.\n ValueError: When no module with the specified member is found.\n\n \"\"\"\n # attempt to resolve an alias\n with alias_lock:\n obj = GlobalAliases.get(name)\n\n if name in GlobalAliases and obj is None:\n raise AssertionError\n\n # attempt to resolve a qualified name\n if obj is None and \".\" in name:\n modname, declname = name.rsplit(\".\", 1)\n\n try:\n mod = importlib.import_module(modname)\n obj = getattr(mod, declname, None)\n except ModuleNotFoundError:\n raise ValueError(f\"Module {modname!r} not found.\")\n\n if obj is None:\n raise ValueError(f\"Module {modname!r} does not have member {declname!r}.\")\n\n # attempt to resolve a simple name\n if obj is None:\n # Get all modules having the declaration/import, need to check here that getattr returns something which doesn't\n # equate to False since in places __getattr__ returns 0 incorrectly:\n # https://github.com/tensorflow/tensorboard/blob/a22566561d2b4fea408755a951ac9eaf3a156f8e/tensorboard/compat/tensorflow_stub/pywrap_tensorflow.py#L35 # noqa: B950\n mods = [m for m in list(sys.modules.values()) if getattr(m, name, None)]\n\n if len(mods) > 0: # found modules with this declaration or import\n if len(mods) > 1: # found multiple modules, need to determine if ambiguous or just multiple imports\n foundmods = {inspect.getmodule(getattr(m, name)) for m in mods} # resolve imports\n foundmods = {m for m in foundmods if m is not None}\n\n if len(foundmods) > 1: # found multiple declarations with the same name\n modnames = [m.__name__ for m in foundmods]\n msg = f\"Multiple modules ({modnames!r}) with declaration name {name!r} found, resolution is ambiguous.\"\n raise ValueError(msg)\n mods = list(foundmods)\n\n obj = getattr(mods[0], name)\n\n if obj is None:\n raise ValueError(f\"No module with member {name!r} found.\")\n\n return obj", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_fall_back_tuple_fall_back_tuple.return.tuple_use_the_default": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_fall_back_tuple_fall_back_tuple.return.tuple_use_the_default", "embedding": null, "metadata": {"file_path": "monai/utils/misc.py", "file_name": "misc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 134, "end_line": 177, "span_ids": ["fall_back_tuple"], "tokens": 466}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def fall_back_tuple(\n user_provided: Any, default: Union[Sequence, np.ndarray], func: Callable = lambda x: x and x > 0\n) -> Tuple[Any, ...]:\n \"\"\"\n Refine `user_provided` according to the `default`, and returns as a validated tuple.\n\n The validation is done for each element in `user_provided` using `func`.\n If 
`func(user_provided[idx])` returns False, the corresponding `default[idx]` will be used\n as the fallback.\n\n Typically used when `user_provided` is a tuple of window size provided by the user,\n `default` is defined by data, this function returns an updated `user_provided` with its non-positive\n components replaced by the corresponding components from `default`.\n\n Args:\n user_provided: item to be validated.\n default: a sequence used to provided the fallbacks.\n func: a Callable to validate every components of `user_provided`.\n\n Examples::\n\n >>> fall_back_tuple((1, 2), (32, 32))\n (1, 2)\n >>> fall_back_tuple(None, (32, 32))\n (32, 32)\n >>> fall_back_tuple((-1, 10), (32, 32))\n (32, 10)\n >>> fall_back_tuple((-1, None), (32, 32))\n (32, 32)\n >>> fall_back_tuple((1, None), (32, 32))\n (1, 32)\n >>> fall_back_tuple(0, (32, 32))\n (32, 32)\n >>> fall_back_tuple(range(3), (32, 64, 48))\n (32, 1, 2)\n >>> fall_back_tuple([0], (32, 32))\n ValueError: Sequence must have length 2, got length 1.\n\n \"\"\"\n ndim = len(default)\n user = ensure_tuple_rep(user_provided, ndim)\n return tuple( # use the default values if user provided is not valid\n user_c if func(user_c) else default_c for default_c, user_c in zip(default, user)\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_is_scalar_tensor_get_seed.return._seed": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_is_scalar_tensor_get_seed.return._seed", "embedding": null, "metadata": {"file_path": "monai/utils/misc.py", "file_name": "misc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 180, "end_line": 212, "span_ids": ["is_scalar", "is_scalar_tensor", "progress_bar", "get_seed"], "tokens": 296}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def is_scalar_tensor(val: Any) -> bool:\n if isinstance(val, torch.Tensor) and val.ndim == 0:\n return True\n return False\n\n\ndef is_scalar(val: Any) -> bool:\n if isinstance(val, torch.Tensor) and val.ndim == 0:\n return True\n return bool(np.isscalar(val))\n\n\ndef progress_bar(index: int, count: int, desc: Optional[str] = None, bar_len: int = 30, newline: bool = False) -> None:\n \"\"\"print a progress bar to track some time consuming task.\n\n Args:\n index: current status in progress.\n count: total steps of the progress.\n desc: description of the progress bar, if not None, show before the progress bar.\n bar_len: the total length of the bar on screen, default is 30 char.\n newline: whether to print in a new line for every index.\n \"\"\"\n end = \"\\r\" if newline is False else \"\\r\\n\"\n filled_len = int(bar_len * index // count)\n bar = f\"{desc} \" if desc is not None else \"\"\n bar += \"[\" + \"=\" * filled_len + \" \" * (bar_len - filled_len) + \"]\"\n print(f\"{index}/{count} {bar}\", end=end)\n if index == count:\n print(\"\")\n\n\ndef get_seed() -> Optional[int]:\n return _seed", "start_char_idx": null, "end_char_idx": 
null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/module.py_optional_import_optional_import.msg.descriptor_format_actual_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/module.py_optional_import_optional_import.msg.descriptor_format_actual_", "embedding": null, "metadata": {"file_path": "monai/utils/module.py", "file_name": "module.py", "file_type": "text/x-python", "category": "implementation", "start_line": 134, "end_line": 210, "span_ids": ["optional_import"], "tokens": 759}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def optional_import(\n module: str,\n version: str = \"\",\n version_checker: Callable[..., bool] = min_version,\n name: str = \"\",\n descriptor: str = OPTIONAL_IMPORT_MSG_FMT,\n version_args=None,\n allow_namespace_pkg: bool = False,\n) -> Tuple[Any, bool]:\n \"\"\"\n Imports an optional module specified by `module` string.\n Any importing related exceptions will be stored, and exceptions raise lazily\n when attempting to use the failed-to-import module.\n\n Args:\n module: name of the module to be imported.\n version: version string used by the version_checker.\n version_checker: a callable to check the module version, Defaults to monai.utils.min_version.\n name: a non-module attribute (such as method/class) to import from the imported module.\n descriptor: a format string for the final error message when using a not imported module.\n version_args: additional parameters to the version checker.\n allow_namespace_pkg: whether importing a namespace package is allowed. 
Defaults to False.\n\n Returns:\n The imported module and a boolean flag indicating whether the import is successful.\n\n Examples::\n\n >>> torch, flag = optional_import('torch', '1.1')\n >>> print(torch, flag)\n True\n\n >>> the_module, flag = optional_import('unknown_module')\n >>> print(flag)\n False\n >>> the_module.method # trying to access a module which is not imported\n OptionalImportError: import unknown_module (No module named 'unknown_module').\n\n >>> torch, flag = optional_import('torch', '42', exact_version)\n >>> torch.nn # trying to access a module for which there isn't a proper version imported\n OptionalImportError: import torch (requires version '42' by 'exact_version').\n\n >>> conv, flag = optional_import('torch.nn.functional', '1.0', name='conv1d')\n >>> print(conv)\n \n\n >>> conv, flag = optional_import('torch.nn.functional', '42', name='conv1d')\n >>> conv() # trying to use a function from the not successfully imported module (due to unmatched version)\n OptionalImportError: from torch.nn.functional import conv1d (requires version '42' by 'min_version').\n \"\"\"\n\n tb = None\n exception_str = \"\"\n if name:\n actual_cmd = f\"from {module} import {name}\"\n else:\n actual_cmd = f\"import {module}\"\n try:\n pkg = __import__(module) # top level module\n the_module = import_module(module)\n if not allow_namespace_pkg:\n is_namespace = getattr(the_module, \"__file__\", None) is None and hasattr(the_module, \"__path__\")\n if is_namespace:\n raise AssertionError\n if name: # user specified to load class/function/... from the module\n the_module = getattr(the_module, name)\n except Exception as import_exception: # any exceptions during import\n tb = import_exception.__traceback__\n exception_str = f\"{import_exception}\"\n else: # found the module\n if version_args and version_checker(pkg, f\"{version}\", version_args):\n return the_module, True\n if not version_args and version_checker(pkg, f\"{version}\"):\n return the_module, True\n\n # preparing lazy error message\n msg = descriptor.format(actual_cmd)\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_make_animated_gif_summary_make_animated_gif_summary.return.summary_op": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_make_animated_gif_summary_make_animated_gif_summary.return.summary_op", "embedding": null, "metadata": {"file_path": "monai/visualize/img2tensorboard.py", "file_name": "img2tensorboard.py", "file_type": "text/x-python", "category": "implementation", "start_line": 61, "end_line": 102, "span_ids": ["make_animated_gif_summary"], "tokens": 413}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def make_animated_gif_summary(\n tag: str,\n image: Union[np.ndarray, torch.Tensor],\n max_out: int = 3,\n animation_axes: Sequence[int] = (3,),\n image_axes: Sequence[int] = (1, 2),\n other_indices: Optional[Dict] = None,\n scale_factor: float = 1.0,\n) -> Summary:\n \"\"\"Creates an animated gif out of an image tensor in 'CHWD' format and returns Summary.\n\n Args:\n tag: Data identifier\n image: The image, expected to be in CHWD format\n max_out: maximum number of slices to animate through\n animation_axes: axis to animate on (not currently used)\n image_axes: axes of image (not currently used)\n other_indices: (not currently used)\n scale_factor: amount to multiply values by.\n if the image data is between 0 and 1, using 255 for this value will scale it to displayable range\n \"\"\"\n\n suffix = \"/image\" if max_out == 1 else \"/image/{}\"\n if other_indices is None:\n other_indices = {}\n axis_order = [0] + list(animation_axes) + list(image_axes)\n\n slicing = []\n for i in range(len(image.shape)):\n if i in axis_order:\n slicing.append(slice(None))\n else:\n other_ind = other_indices.get(i, 0)\n slicing.append(slice(other_ind, other_ind + 1))\n image = image[tuple(slicing)]\n\n for it_i in range(min(max_out, list(image.shape)[0])):\n one_channel_img: Union[torch.Tensor, np.ndarray] = (\n image[it_i, :, :, :].squeeze(dim=0) if isinstance(image, torch.Tensor) else image[it_i, :, :, :]\n )\n summary_op = _image3_animated_gif(tag + suffix.format(it_i), one_channel_img, scale_factor)\n return summary_op", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_add_animated_gif_add_animated_gif.writer__get_file_writer_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_add_animated_gif_add_animated_gif.writer__get_file_writer_", "embedding": null, "metadata": {"file_path": "monai/visualize/img2tensorboard.py", "file_name": "img2tensorboard.py", "file_type": "text/x-python", "category": "implementation", 
"start_line": 102, "end_line": 126, "span_ids": ["add_animated_gif"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def add_animated_gif(\n writer: SummaryWriter,\n tag: str,\n image_tensor: Union[np.ndarray, torch.Tensor],\n max_out: int,\n scale_factor: float,\n global_step: Optional[int] = None,\n) -> None:\n \"\"\"Creates an animated gif out of an image tensor in 'CHWD' format and writes it with SummaryWriter.\n\n Args:\n writer: Tensorboard SummaryWriter to write to\n tag: Data identifier\n image_tensor: tensor for the image to add, expected to be in CHWD format\n max_out: maximum number of slices to animate through\n scale_factor: amount to multiply values by. If the image data is between 0 and 1, using 255 for this value will\n scale it to displayable range\n global_step: Global step value to record\n \"\"\"\n writer._get_file_writer().add_summary(\n make_animated_gif_summary(\n tag, image_tensor, max_out=max_out, animation_axes=[1], image_axes=[2, 3], scale_factor=scale_factor\n ),\n global_step,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_add_animated_gif_no_channels_add_animated_gif_no_channels.writer__get_file_writer_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_add_animated_gif_no_channels_add_animated_gif_no_channels.writer__get_file_writer_", "embedding": null, "metadata": {"file_path": "monai/visualize/img2tensorboard.py", "file_name": "img2tensorboard.py", "file_type": "text/x-python", "category": "implementation", "start_line": 129, "end_line": 155, "span_ids": ["add_animated_gif_no_channels"], "tokens": 268}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def add_animated_gif_no_channels(\n writer: SummaryWriter,\n tag: str,\n image_tensor: Union[np.ndarray, torch.Tensor],\n max_out: int,\n scale_factor: float,\n global_step: Optional[int] = None,\n) -> None:\n \"\"\"Creates an animated gif out of an image tensor in 'HWD' format that does not have\n a channel dimension and writes it with SummaryWriter. This is similar to the \"add_animated_gif\"\n after inserting a channel dimension of 1.\n\n Args:\n writer: Tensorboard SummaryWriter to write to\n tag: Data identifier\n image_tensor: tensor for the image to add, expected to be in CHWD format\n max_out: maximum number of slices to animate through\n scale_factor: amount to multiply values by. 
If the image data is between 0 and 1,\n using 255 for this value will scale it to displayable range\n global_step: Global step value to record\n \"\"\"\n writer._get_file_writer().add_summary(\n make_animated_gif_summary(\n tag, image_tensor, max_out=max_out, animation_axes=[1], image_axes=[1, 2], scale_factor=scale_factor\n ),\n global_step,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_plot_2d_or_3d_image_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py_plot_2d_or_3d_image_", "embedding": null, "metadata": {"file_path": "monai/visualize/img2tensorboard.py", "file_name": "img2tensorboard.py", "file_type": "text/x-python", "category": "implementation", "start_line": 161, "end_line": 210, "span_ids": ["plot_2d_or_3d_image"], "tokens": 529}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def plot_2d_or_3d_image(\n data: Union[torch.Tensor, np.ndarray],\n step: int,\n writer: SummaryWriter,\n index: int = 0,\n max_channels: int = 1,\n max_frames: int = 64,\n tag: str = \"output\",\n) -> None:\n \"\"\"Plot 2D or 3D image on the TensorBoard, 3D image will be converted to GIF image.\n\n Note:\n Plot 3D or 2D image(with more than 3 channels) as separate images.\n\n Args:\n data: target data to be plotted as image on the TensorBoard.\n The data is expected to have 'NCHW[D]' dimensions, and only plot the first in the batch.\n step: current step to plot in a chart.\n writer: specify TensorBoard SummaryWriter to plot the image.\n index: plot which element in the input data batch, default is the first element.\n max_channels: number of channels to plot.\n max_frames: number of frames for 2D-t plot.\n tag: tag of the plotted image on TensorBoard.\n \"\"\"\n d = data[index].detach().cpu().numpy() if isinstance(data, torch.Tensor) else data[index]\n\n if d.ndim == 2:\n d = rescale_array(d, 0, 1)\n dataformats = \"HW\"\n writer.add_image(f\"{tag}_{dataformats}\", d, step, dataformats=dataformats)\n return\n\n if d.ndim == 3:\n if d.shape[0] == 3 and max_channels == 3: # RGB\n dataformats = \"CHW\"\n writer.add_image(f\"{tag}_{dataformats}\", d, step, dataformats=dataformats)\n return\n dataformats = \"HW\"\n for j, d2 in enumerate(d[:max_channels]):\n d2 = rescale_array(d2, 0, 1)\n writer.add_image(f\"{tag}_{dataformats}_{j}\", d2, step, dataformats=dataformats)\n return\n\n if d.ndim >= 4:\n spatial = d.shape[-3:]\n for j, d3 in enumerate(d.reshape([-1] + list(spatial))[:max_channels]):\n d3 = rescale_array(d3, 0, 255)\n add_animated_gif(writer, f\"{tag}_HWD_{j}\", d3[None], max_frames, 1.0, step)\n return", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/__init__.py_sys_": {"__data__": 
{"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/__init__.py_sys_", "embedding": null, "metadata": {"file_path": "tests/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 38, "span_ids": ["_enter_pr_4800", "impl", "docstring"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\nimport unittest\nimport warnings\n\n\ndef _enter_pr_4800(self):\n \"\"\"\n code from https://github.com/python/cpython/pull/4800\n \"\"\"\n # The __warningregistry__'s need to be in a pristine state for tests\n # to work properly.\n for v in list(sys.modules.values()):\n if getattr(v, \"__warningregistry__\", None):\n v.__warningregistry__ = {}\n self.warnings_manager = warnings.catch_warnings(record=True)\n self.warnings = self.warnings_manager.__enter__()\n warnings.simplefilter(\"always\", self.expected)\n return self\n\n\n# workaround for https://bugs.python.org/issue29620\ntry:\n # Suppression for issue #494: tests/__init__.py:34: error: Cannot assign to a method\n unittest.case._AssertWarnsContext.__enter__ = _enter_pr_4800 # type: ignore\nexcept AttributeError:\n pass", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_activationsd.py_unittest_TEST_CASE_3._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_activationsd.py_unittest_TEST_CASE_3._", "embedding": null, "metadata": {"file_path": "tests/test_activationsd.py", "file_name": "test_activationsd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 44, "span_ids": ["docstring"], "tokens": 482}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import Activationsd\n\nTEST_CASE_1 = [\n {\"keys\": [\"pred\", \"label\"], \"sigmoid\": False, \"softmax\": [True, False], \"other\": None},\n {\"pred\": torch.tensor([[[[0.0, 1.0]], [[2.0, 3.0]]]]), \"label\": torch.tensor([[[[0.0, 1.0]], [[2.0, 3.0]]]])},\n {\n \"pred\": torch.tensor([[[[0.1192, 0.1192]], [[0.8808, 0.8808]]]]),\n \"label\": torch.tensor([[[[0.0, 1.0]], [[2.0, 3.0]]]]),\n },\n (1, 2, 1, 2),\n]\n\nTEST_CASE_2 = [\n {\"keys\": [\"pred\", \"label\"], \"sigmoid\": False, \"softmax\": False, \"other\": [torch.tanh, None]},\n {\"pred\": torch.tensor([[[[0.0, 1.0], [2.0, 3.0]]]]), \"label\": torch.tensor([[[[0.0, 1.0], [2.0, 3.0]]]])},\n {\n \"pred\": torch.tensor([[[[0.0000, 0.7616], [0.9640, 0.9951]]]]),\n \"label\": torch.tensor([[[[0.0, 1.0], [2.0, 3.0]]]]),\n },\n (1, 1, 2, 2),\n]\n\nTEST_CASE_3 = [\n {\"keys\": \"pred\", 
\"sigmoid\": False, \"softmax\": False, \"other\": torch.tanh},\n {\"pred\": torch.tensor([[[[0.0, 1.0], [2.0, 3.0]]]])},\n {\"pred\": torch.tensor([[[[0.0000, 0.7616], [0.9640, 0.9951]]]])},\n (1, 1, 2, 2),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_activationsd.py_TestActivationsd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_activationsd.py_TestActivationsd_", "embedding": null, "metadata": {"file_path": "tests/test_activationsd.py", "file_name": "test_activationsd.py", "file_type": "text/x-python", "category": "test", "start_line": 45, "end_line": 58, "span_ids": ["TestActivationsd.test_value_shape", "TestActivationsd", "impl:7"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestActivationsd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_value_shape(self, input_param, test_input, output, expected_shape):\n result = Activationsd(**input_param)(test_input)\n torch.testing.assert_allclose(result[\"pred\"], output[\"pred\"])\n self.assertTupleEqual(result[\"pred\"].shape, expected_shape)\n if \"label\" in result:\n torch.testing.assert_allclose(result[\"label\"], output[\"label\"])\n self.assertTupleEqual(result[\"label\"].shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adaptors.py_TestAdaptors.test_multi_in_single_out_TestAdaptors.test_multi_in_single_out.None_2.self_assertEqual_dres_lb": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adaptors.py_TestAdaptors.test_multi_in_single_out_TestAdaptors.test_multi_in_single_out.None_2.self_assertEqual_dres_lb", "embedding": null, "metadata": {"file_path": "tests/test_adaptors.py", "file_name": "test_adaptors.py", "file_type": "text/x-python", "category": "test", "start_line": 56, "end_line": 85, "span_ids": ["TestAdaptors.test_multi_in_single_out"], "tokens": 320}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAdaptors(unittest.TestCase):\n\n def test_multi_in_single_out(self):\n def foo(image, label):\n return image * label\n\n it = itertools.product([\"image\", [\"image\"]], [None, [\"image\", \"label\"], {\"image\": \"image\", \"label\": \"label\"}])\n\n for i in it:\n d = {\"image\": 2, \"label\": 3}\n dres = adaptor(foo, i[0], i[1])(d)\n 
self.assertEqual(dres[\"image\"], 6)\n self.assertEqual(dres[\"label\"], 3)\n\n it = itertools.product(\n [\"newimage\", [\"newimage\"]], [None, [\"image\", \"label\"], {\"image\": \"image\", \"label\": \"label\"}]\n )\n\n for i in it:\n d = {\"image\": 2, \"label\": 3}\n dres = adaptor(foo, i[0], i[1])(d)\n self.assertEqual(dres[\"image\"], 2)\n self.assertEqual(dres[\"label\"], 3)\n self.assertEqual(dres[\"newimage\"], 6)\n\n it = itertools.product([\"img\", [\"img\"]], [{\"img\": \"image\", \"lbl\": \"label\"}])\n\n for i in it:\n d = {\"img\": 2, \"lbl\": 3}\n dres = adaptor(foo, i[0], i[1])(d)\n self.assertEqual(dres[\"img\"], 6)\n self.assertEqual(dres[\"lbl\"], 3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adaptors.py_TestAdaptors.test_default_arg_single_out_TestAdaptors.test_dict_out.self_assertEqual_dres_b_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adaptors.py_TestAdaptors.test_default_arg_single_out_TestAdaptors.test_dict_out.self_assertEqual_dres_b_", "embedding": null, "metadata": {"file_path": "tests/test_adaptors.py", "file_name": "test_adaptors.py", "file_type": "text/x-python", "category": "test", "start_line": 87, "end_line": 118, "span_ids": ["TestAdaptors.test_multi_out", "TestAdaptors.test_dict_out", "TestAdaptors.test_default_arg_single_out"], "tokens": 270}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAdaptors(unittest.TestCase):\n\n def test_default_arg_single_out(self):\n def foo(a, b=2):\n return a * b\n\n d = {\"a\": 5}\n dres = adaptor(foo, \"c\")(d)\n self.assertEqual(dres[\"c\"], 10)\n\n d = {\"b\": 5}\n with self.assertRaises(TypeError):\n dres = adaptor(foo, \"c\")(d)\n\n def test_multi_out(self):\n def foo(a, b):\n return a * b, a / b\n\n d = {\"a\": 3, \"b\": 4}\n dres = adaptor(foo, [\"c\", \"d\"])(d)\n self.assertEqual(dres[\"c\"], 12)\n self.assertEqual(dres[\"d\"], 3 / 4)\n\n def test_dict_out(self):\n def foo(a):\n return {\"a\": a * 2}\n\n d = {\"a\": 2}\n dres = adaptor(foo, {\"a\": \"a\"})(d)\n self.assertEqual(dres[\"a\"], 4)\n\n d = {\"b\": 2}\n dres = adaptor(foo, {\"a\": \"b\"}, {\"b\": \"a\"})(d)\n self.assertEqual(dres[\"b\"], 4)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adaptors.py_TestApplyAlias_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adaptors.py_TestApplyAlias_", "embedding": null, "metadata": {"file_path": "tests/test_adaptors.py", "file_name": "test_adaptors.py", "file_type": "text/x-python", "category": "test", "start_line": 121, "end_line": 149, "span_ids": ["TestToKwargs", "TestToKwargs.test_to_kwargs", "TestApplyAlias", 
"TestApplyAlias.test_apply_alias"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestApplyAlias(unittest.TestCase):\n def test_apply_alias(self):\n def foo(d):\n d[\"x\"] *= 2\n return d\n\n d = {\"a\": 1, \"b\": 3}\n result = apply_alias(foo, {\"b\": \"x\"})(d)\n self.assertDictEqual({\"a\": 1, \"b\": 6}, result)\n\n\nclass TestToKwargs(unittest.TestCase):\n def test_to_kwargs(self):\n def foo(**kwargs):\n results = {k: v * 2 for k, v in kwargs.items()}\n return results\n\n def compose_like(fn, data):\n data = fn(data)\n return data\n\n d = {\"a\": 1, \"b\": 2}\n\n actual = compose_like(to_kwargs(foo), d)\n self.assertDictEqual(actual, {\"a\": 2, \"b\": 4})\n\n with self.assertRaises(TypeError):\n actual = compose_like(foo, d)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_add_channeld.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_add_channeld.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_add_channeld.py", "file_name": "test_add_channeld.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 36, "span_ids": ["TestAddChanneld.test_shape", "TestAddChanneld", "impl:3", "docstring"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import AddChanneld\n\nTEST_CASE_1 = [\n {\"keys\": [\"img\", \"seg\"]},\n {\"img\": np.array([[0, 1], [1, 2]]), \"seg\": np.array([[0, 1], [1, 2]])},\n (1, 2, 2),\n]\n\n\nclass TestAddChanneld(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1])\n def test_shape(self, input_param, input_data, expected_shape):\n result = AddChanneld(**input_param)(input_data)\n self.assertEqual(result[\"img\"].shape, expected_shape)\n self.assertEqual(result[\"seg\"].shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adjust_contrast.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adjust_contrast.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_adjust_contrast.py", "file_name": "test_adjust_contrast.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 44, "span_ids": 
["TestAdjustContrast.test_correct_results", "impl:7", "TestAdjustContrast", "docstring"], "tokens": 237}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import AdjustContrast\nfrom tests.utils import NumpyImageTestCase2D\n\nTEST_CASE_1 = [1.0]\n\nTEST_CASE_2 = [0.5]\n\nTEST_CASE_3 = [4.5]\n\n\nclass TestAdjustContrast(NumpyImageTestCase2D):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_correct_results(self, gamma):\n adjuster = AdjustContrast(gamma=gamma)\n result = adjuster(self.imt)\n if gamma == 1.0:\n expected = self.imt\n else:\n epsilon = 1e-7\n img_min = self.imt.min()\n img_range = self.imt.max() - img_min\n expected = np.power(((self.imt - img_min) / float(img_range + epsilon)), gamma) * img_range + img_min\n np.testing.assert_allclose(expected, result, rtol=1e-05)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adjust_contrastd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adjust_contrastd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_adjust_contrastd.py", "file_name": "test_adjust_contrastd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 44, "span_ids": ["TestAdjustContrastd.test_correct_results", "TestAdjustContrastd", "impl:7", "docstring"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import AdjustContrastd\nfrom tests.utils import NumpyImageTestCase2D\n\nTEST_CASE_1 = [1.0]\n\nTEST_CASE_2 = [0.5]\n\nTEST_CASE_3 = [4.5]\n\n\nclass TestAdjustContrastd(NumpyImageTestCase2D):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_correct_results(self, gamma):\n adjuster = AdjustContrastd(\"img\", gamma=gamma)\n result = adjuster({\"img\": self.imt})\n if gamma == 1.0:\n expected = self.imt\n else:\n epsilon = 1e-7\n img_min = self.imt.min()\n img_range = self.imt.max() - img_min\n expected = np.power(((self.imt - img_min) / float(img_range + epsilon)), gamma) * img_range + img_min\n np.testing.assert_allclose(expected, result[\"img\"], rtol=1e-05)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_affine.py", "file_name": "test_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 74, "span_ids": ["docstring"], "tokens": 1300}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import Affine\n\nTEST_CASES = [\n [\n dict(padding_mode=\"zeros\", as_tensor_output=False, device=None),\n {\"img\": np.arange(9).reshape((1, 3, 3)), \"spatial_size\": (-1, 0)},\n np.arange(9).reshape(1, 3, 3),\n ],\n [\n dict(padding_mode=\"zeros\", as_tensor_output=False, device=None),\n {\"img\": np.arange(4).reshape((1, 2, 2))},\n np.arange(4).reshape(1, 2, 2),\n ],\n [\n dict(padding_mode=\"zeros\", as_tensor_output=False, device=None),\n {\"img\": np.arange(4).reshape((1, 2, 2)), \"spatial_size\": (4, 4)},\n np.array([[[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 2.0, 3.0, 0.0], [0.0, 0.0, 0.0, 0.0]]]),\n ],\n [\n dict(rotate_params=[np.pi / 2], padding_mode=\"zeros\", as_tensor_output=False, device=None),\n {\"img\": np.arange(4).reshape((1, 2, 2)), \"spatial_size\": (4, 4)},\n np.array([[[0.0, 0.0, 0.0, 0.0], [0.0, 2.0, 0.0, 0.0], [0.0, 3.0, 1.0, 0.0], [0.0, 0.0, 0.0, 0.0]]]),\n ],\n [\n dict(padding_mode=\"zeros\", as_tensor_output=False, device=None),\n {\"img\": np.arange(27).reshape((1, 3, 3, 3)), \"spatial_size\": (-1, 0, 0)},\n np.arange(27).reshape(1, 3, 3, 3),\n ],\n [\n dict(padding_mode=\"zeros\", as_tensor_output=False, device=None),\n {\"img\": np.arange(8).reshape((1, 2, 2, 2)), \"spatial_size\": (4, 4, 4)},\n np.array(\n [\n [\n [[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]],\n [[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 2.0, 3.0, 0.0], [0.0, 0.0, 0.0, 0.0]],\n [[0.0, 0.0, 0.0, 0.0], [0.0, 4.0, 5.0, 0.0], [0.0, 6.0, 7.0, 0.0], [0.0, 0.0, 0.0, 0.0]],\n [[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]],\n ]\n ]\n ),\n ],\n [\n dict(rotate_params=[np.pi / 2], padding_mode=\"zeros\", as_tensor_output=False, device=None),\n {\"img\": np.arange(8).reshape((1, 2, 2, 2)), \"spatial_size\": (4, 4, 4)},\n np.array(\n [\n [\n [[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]],\n [[0.0, 0.0, 0.0, 0.0], [0.0, 2.0, 0.0, 0.0], [0.0, 3.0, 1.0, 0.0], [0.0, 0.0, 0.0, 0.0]],\n [[0.0, 0.0, 0.0, 0.0], [0.0, 6.0, 4.0, 0.0], [0.0, 7.0, 5.0, 0.0], [0.0, 0.0, 0.0, 0.0]],\n [[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]],\n ]\n ]\n ),\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine.py_TestAffine_": 
{"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine.py_TestAffine_", "embedding": null, "metadata": {"file_path": "tests/test_affine.py", "file_name": "test_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 77, "end_line": 88, "span_ids": ["TestAffine", "impl:3", "TestAffine.test_affine"], "tokens": 105}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffine(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_affine(self, input_param, input_data, expected_val):\n g = Affine(**input_param)\n result, _ = g(**input_data)\n self.assertEqual(isinstance(result, torch.Tensor), isinstance(expected_val, torch.Tensor))\n np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_grid.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_grid.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_affine_grid.py", "file_name": "test_affine_grid.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 88, "span_ids": ["docstring"], "tokens": 1413}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import AffineGrid\n\nTEST_CASES = [\n [\n {\"as_tensor_output\": False, \"device\": torch.device(\"cpu:0\")},\n {\"spatial_size\": (2, 2)},\n np.array([[[-0.5, -0.5], [0.5, 0.5]], [[-0.5, 0.5], [-0.5, 0.5]], [[1.0, 1.0], [1.0, 1.0]]]),\n ],\n [\n {\"as_tensor_output\": True, \"device\": None},\n {\"spatial_size\": (2, 2)},\n torch.tensor([[[-0.5, -0.5], [0.5, 0.5]], [[-0.5, 0.5], [-0.5, 0.5]], [[1.0, 1.0], [1.0, 1.0]]]),\n ],\n [{\"as_tensor_output\": False, \"device\": None}, {\"grid\": np.ones((3, 3, 3))}, np.ones((3, 3, 3))],\n [{\"as_tensor_output\": True, \"device\": torch.device(\"cpu:0\")}, {\"grid\": np.ones((3, 3, 3))}, torch.ones((3, 3, 3))],\n [{\"as_tensor_output\": False, \"device\": None}, {\"grid\": torch.ones((3, 3, 3))}, np.ones((3, 3, 3))],\n [\n {\"as_tensor_output\": True, \"device\": torch.device(\"cpu:0\")},\n {\"grid\": torch.ones((3, 3, 3))},\n torch.ones((3, 3, 3)),\n ],\n [\n {\n \"rotate_params\": (1.0, 1.0),\n \"scale_params\": (-20, 10),\n \"as_tensor_output\": True,\n \"device\": torch.device(\"cpu:0\"),\n },\n {\"grid\": torch.ones((3, 3, 3))},\n torch.tensor(\n [\n [[-19.2208, -19.2208, -19.2208], [-19.2208, -19.2208, -19.2208], [-19.2208, -19.2208, 
-19.2208]],\n [[-11.4264, -11.4264, -11.4264], [-11.4264, -11.4264, -11.4264], [-11.4264, -11.4264, -11.4264]],\n [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],\n ]\n ),\n ],\n [\n {\n \"rotate_params\": (1.0, 1.0, 1.0),\n \"scale_params\": (-20, 10),\n \"as_tensor_output\": True,\n \"device\": torch.device(\"cpu:0\"),\n },\n {\"grid\": torch.ones((4, 3, 3, 3))},\n torch.tensor(\n [\n [\n [[-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435]],\n [[-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435]],\n [[-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435]],\n ],\n [\n [[-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381]],\n [[-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381]],\n [[-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381]],\n ],\n [\n [[-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844]],\n [[-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844]],\n [[-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844]],\n ],\n [\n [[1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000]],\n [[1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000]],\n [[1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000]],\n ],\n ]\n ),\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_grid.py_TestAffineGrid_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_grid.py_TestAffineGrid_", "embedding": null, "metadata": {"file_path": "tests/test_affine_grid.py", "file_name": "test_affine_grid.py", "file_type": "text/x-python", "category": "test", "start_line": 91, "end_line": 105, "span_ids": ["TestAffineGrid", "impl:3", "TestAffineGrid.test_affine_grid"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineGrid(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_affine_grid(self, input_param, input_data, expected_val):\n g = AffineGrid(**input_param)\n result, _ = g(**input_data)\n self.assertEqual(isinstance(result, torch.Tensor), isinstance(expected_val, torch.Tensor))\n if isinstance(result, torch.Tensor):\n np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4)\n else:\n np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_unittest_TEST_NORM_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_unittest_TEST_NORM_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 30, "span_ids": ["docstring"], "tokens": 341}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks import normalize_transform, to_norm_affine\nfrom monai.networks.layers import AffineTransform\n\nTEST_NORM_CASES = [\n [(4, 5), True, [[[0.666667, 0, -1], [0, 0.5, -1], [0, 0, 1]]]],\n [\n (2, 4, 5),\n True,\n [[[2.0, 0.0, 0.0, -1.0], [0.0, 0.6666667, 0.0, -1.0], [0.0, 0.0, 0.5, -1.0], [0.0, 0.0, 0.0, 1.0]]],\n ],\n [(4, 5), False, [[[0.5, 0.0, -0.75], [0.0, 0.4, -0.8], [0.0, 0.0, 1.0]]]],\n [(2, 4, 5), False, [[[1.0, 0.0, 0.0, -0.5], [0.0, 0.5, 0.0, -0.75], [0.0, 0.0, 0.4, -0.8], [0.0, 0.0, 0.0, 1.0]]]],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TEST_TO_NORM_AFFINE_CASES_TEST_ILL_TO_NORM_AFFINE_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TEST_TO_NORM_AFFINE_CASES_TEST_ILL_TO_NORM_AFFINE_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 32, "end_line": 67, "span_ids": ["impl:5", "docstring"], "tokens": 720}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_TO_NORM_AFFINE_CASES = [\n [\n [[[1, 0, 0], [0, 1, 0], [0, 0, 1]]],\n (4, 6),\n (5, 3),\n True,\n [[[1.3333334, 0.0, 0.33333337], [0.0, 0.4, -0.6], [0.0, 0.0, 1.0]]],\n ],\n [\n [[[1, 0, 0], [0, 1, 0], [0, 0, 1]]],\n (4, 6),\n (5, 3),\n False,\n [[[1.25, 0.0, 0.25], [0.0, 0.5, -0.5], [0.0, 0.0, 1.0]]],\n ],\n [\n [[[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]],\n (2, 4, 6),\n (3, 5, 3),\n True,\n [[[2.0, 0.0, 0.0, 1.0], [0.0, 1.3333334, 0.0, 0.33333337], [0.0, 0.0, 0.4, -0.6], [0.0, 0.0, 0.0, 1.0]]],\n ],\n [\n [[[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]],\n (2, 4, 6),\n (3, 5, 3),\n False,\n [[[1.5, 0.0, 0.0, 0.5], [0.0, 1.25, 0.0, 0.25], [0.0, 0.0, 0.5, -0.5], [0.0, 0.0, 0.0, 1.0]]],\n ],\n]\n\nTEST_ILL_TO_NORM_AFFINE_CASES = [\n [[[[1, 0, 0], [0, 1, 0], [0, 0, 1]]], (3, 4, 6), (3, 5, 3), 
False],\n [[[[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]], (4, 6), (3, 5, 3), True],\n [[[[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]]], (4, 6), (3, 5, 3), True],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestNormTransform_TestNormTransform.test_norm_xform.if_torch_cuda_is_availabl.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestNormTransform_TestNormTransform.test_norm_xform.if_torch_cuda_is_availabl.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 70, "end_line": 83, "span_ids": ["TestNormTransform.test_norm_xform", "TestNormTransform"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNormTransform(unittest.TestCase):\n @parameterized.expand(TEST_NORM_CASES)\n def test_norm_xform(self, input_shape, align_corners, expected):\n norm = normalize_transform(\n input_shape, device=torch.device(\"cpu:0\"), dtype=torch.float32, align_corners=align_corners\n )\n norm = norm.detach().cpu().numpy()\n np.testing.assert_allclose(norm, expected, atol=1e-6)\n if torch.cuda.is_available():\n norm = normalize_transform(\n input_shape, device=torch.device(\"cuda:0\"), dtype=torch.float32, align_corners=align_corners\n )\n norm = norm.detach().cpu().numpy()\n np.testing.assert_allclose(norm, expected, atol=1e-4)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestToNormAffine_TestToNormAffine.test_to_norm_affine.if_torch_cuda_is_availabl.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestToNormAffine_TestToNormAffine.test_to_norm_affine.if_torch_cuda_is_availabl.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 86, "end_line": 98, "span_ids": ["TestToNormAffine.test_to_norm_affine", "TestToNormAffine"], "tokens": 209}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestToNormAffine(unittest.TestCase):\n @parameterized.expand(TEST_TO_NORM_AFFINE_CASES)\n def 
test_to_norm_affine(self, affine, src_size, dst_size, align_corners, expected):\n affine = torch.as_tensor(affine, device=torch.device(\"cpu:0\"), dtype=torch.float32)\n new_affine = to_norm_affine(affine, src_size, dst_size, align_corners)\n new_affine = new_affine.detach().cpu().numpy()\n np.testing.assert_allclose(new_affine, expected, atol=1e-6)\n\n if torch.cuda.is_available():\n affine = torch.as_tensor(affine, device=torch.device(\"cuda:0\"), dtype=torch.float32)\n new_affine = to_norm_affine(affine, src_size, dst_size, align_corners)\n new_affine = new_affine.detach().cpu().numpy()\n np.testing.assert_allclose(new_affine, expected, atol=1e-4)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform_TestAffineTransform.test_affine_shift.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform_TestAffineTransform.test_affine_shift.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 109, "end_line": 116, "span_ids": ["TestAffineTransform", "TestAffineTransform.test_affine_shift"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n def test_affine_shift(self):\n affine = torch.as_tensor([[1.0, 0.0, 0.0], [0.0, 1.0, -1.0]])\n image = torch.as_tensor([[[[4.0, 1.0, 3.0, 2.0], [7.0, 6.0, 8.0, 5.0], [3.0, 5.0, 3.0, 6.0]]]])\n out = AffineTransform()(image, affine)\n out = out.detach().cpu().numpy()\n expected = [[[[0, 4, 1, 3], [0, 7, 6, 8], [0, 3, 5, 3]]]]\n np.testing.assert_allclose(out, expected, atol=1e-5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_shift_1_TestAffineTransform.test_affine_shift_1.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_shift_1_TestAffineTransform.test_affine_shift_1.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 118, "end_line": 124, "span_ids": ["TestAffineTransform.test_affine_shift_1"], "tokens": 199}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n\n def test_affine_shift_1(self):\n affine = torch.as_tensor([[1.0, 0.0, -1.0], [0.0, 1.0, -1.0]])\n image = torch.as_tensor([[[[4.0, 1.0, 3.0, 2.0], [7.0, 6.0, 8.0, 5.0], [3.0, 5.0, 3.0, 6.0]]]])\n out = AffineTransform()(image, affine)\n out = out.detach().cpu().numpy()\n expected = [[[[0, 0, 0, 0], [0, 4, 1, 3], [0, 7, 6, 8]]]]\n np.testing.assert_allclose(out, expected, atol=1e-5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_shift_2_TestAffineTransform.test_affine_shift_2.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_shift_2_TestAffineTransform.test_affine_shift_2.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 126, "end_line": 132, "span_ids": ["TestAffineTransform.test_affine_shift_2"], "tokens": 199}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n\n def test_affine_shift_2(self):\n affine = torch.as_tensor([[1.0, 0.0, -1.0], [0.0, 1.0, 0.0]])\n image = torch.as_tensor([[[[4.0, 1.0, 3.0, 2.0], [7.0, 6.0, 8.0, 5.0], [3.0, 5.0, 3.0, 6.0]]]])\n out = AffineTransform()(image, affine)\n out = out.detach().cpu().numpy()\n expected = [[[[0, 0, 0, 0], [4, 1, 3, 2], [7, 6, 8, 5]]]]\n np.testing.assert_allclose(out, expected, atol=1e-5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_zoom_TestAffineTransform.test_zoom.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_zoom_TestAffineTransform.test_zoom.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 134, "end_line": 139, "span_ids": ["TestAffineTransform.test_zoom"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TestAffineTransform(unittest.TestCase):\n\n def test_zoom(self):\n affine = torch.as_tensor([[1.0, 0.0, 0.0], [0.0, 2.0, 0.0]])\n image = torch.arange(1.0, 13.0).view(1, 1, 3, 4).to(device=torch.device(\"cpu:0\"))\n out = AffineTransform((3, 2))(image, affine)\n expected = [[[[1, 3], [5, 7], [9, 11]]]]\n np.testing.assert_allclose(out, expected, atol=1e-5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_zoom_1_TestAffineTransform.test_zoom_1.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_zoom_1_TestAffineTransform.test_zoom_1.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 141, "end_line": 146, "span_ids": ["TestAffineTransform.test_zoom_1"], "tokens": 137}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n\n def test_zoom_1(self):\n affine = torch.as_tensor([[2.0, 0.0, 0.0], [0.0, 1.0, 0.0]])\n image = torch.arange(1.0, 13.0).view(1, 1, 3, 4).to(device=torch.device(\"cpu:0\"))\n out = AffineTransform()(image, affine, (1, 4))\n expected = [[[[1, 2, 3, 4]]]]\n np.testing.assert_allclose(out, expected, atol=1e-5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_zoom_2_TestAffineTransform.test_zoom_2.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_zoom_2_TestAffineTransform.test_zoom_2.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 148, "end_line": 153, "span_ids": ["TestAffineTransform.test_zoom_2"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n\n def test_zoom_2(self):\n affine = torch.as_tensor([[2.0, 0.0, 0.0], [0.0, 2.0, 0.0]], dtype=torch.float32)\n image = torch.arange(1.0, 13.0).view(1, 1, 3, 4).to(device=torch.device(\"cpu:0\"))\n out = AffineTransform((1, 2))(image, affine)\n expected = [[[[1, 3]]]]\n 
np.testing.assert_allclose(out, expected, atol=1e-5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_transform_minimum_TestAffineTransform.test_affine_transform_minimum.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_transform_minimum_TestAffineTransform.test_affine_transform_minimum.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 155, "end_line": 172, "span_ids": ["TestAffineTransform.test_affine_transform_minimum"], "tokens": 305}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n\n def test_affine_transform_minimum(self):\n t = np.pi / 3\n affine = [[np.cos(t), -np.sin(t), 0], [np.sin(t), np.cos(t), 0], [0, 0, 1]]\n affine = torch.as_tensor(affine, device=torch.device(\"cpu:0\"), dtype=torch.float32)\n image = torch.arange(24.0).view(1, 1, 4, 6).to(device=torch.device(\"cpu:0\"))\n out = AffineTransform()(image, affine)\n out = out.detach().cpu().numpy()\n expected = [\n [\n [\n [0.0, 0.06698727, 0.0, 0.0, 0.0, 0.0],\n [3.8660254, 0.86602557, 0.0, 0.0, 0.0, 0.0],\n [7.732051, 3.035899, 0.73205125, 0.0, 0.0, 0.0],\n [11.598076, 6.901923, 2.7631402, 0.0, 0.0, 0.0],\n ]\n ]\n ]\n np.testing.assert_allclose(out, expected, atol=1e-5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_transform_2d_TestAffineTransform.test_affine_transform_2d.if_torch_cuda_is_availabl.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_transform_2d_TestAffineTransform.test_affine_transform_2d.if_torch_cuda_is_availabl.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 174, "end_line": 208, "span_ids": ["TestAffineTransform.test_affine_transform_2d"], "tokens": 558}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n\n def 
test_affine_transform_2d(self):\n t = np.pi / 3\n affine = [[np.cos(t), -np.sin(t), 0], [np.sin(t), np.cos(t), 0], [0, 0, 1]]\n affine = torch.as_tensor(affine, device=torch.device(\"cpu:0\"), dtype=torch.float32)\n image = torch.arange(24.0).view(1, 1, 4, 6).to(device=torch.device(\"cpu:0\"))\n xform = AffineTransform((3, 4), padding_mode=\"border\", align_corners=True, mode=\"bilinear\")\n out = xform(image, affine)\n out = out.detach().cpu().numpy()\n expected = [\n [\n [\n [7.1525574e-07, 4.9999994e-01, 1.0000000e00, 1.4999999e00],\n [3.8660259e00, 1.3660253e00, 1.8660252e00, 2.3660252e00],\n [7.7320518e00, 3.0358994e00, 2.7320509e00, 3.2320507e00],\n ]\n ]\n ]\n np.testing.assert_allclose(out, expected, atol=1e-5)\n\n if torch.cuda.is_available():\n affine = torch.as_tensor(affine, device=torch.device(\"cuda:0\"), dtype=torch.float32)\n image = torch.arange(24.0).view(1, 1, 4, 6).to(device=torch.device(\"cuda:0\"))\n xform = AffineTransform(padding_mode=\"border\", align_corners=True, mode=\"bilinear\")\n out = xform(image, affine, (3, 4))\n out = out.detach().cpu().numpy()\n expected = [\n [\n [\n [7.1525574e-07, 4.9999994e-01, 1.0000000e00, 1.4999999e00],\n [3.8660259e00, 1.3660253e00, 1.8660252e00, 2.3660252e00],\n [7.7320518e00, 3.0358994e00, 2.7320509e00, 3.2320507e00],\n ]\n ]\n ]\n np.testing.assert_allclose(out, expected, atol=1e-4)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_transform_3d_TestAffineTransform.test_affine_transform_3d.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_transform_3d_TestAffineTransform.test_affine_transform_3d.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 210, "end_line": 234, "span_ids": ["TestAffineTransform.test_affine_transform_3d"], "tokens": 519}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n\n def test_affine_transform_3d(self):\n t = np.pi / 3\n affine = [[1, 0, 0, 0], [0.0, np.cos(t), -np.sin(t), 0], [0, np.sin(t), np.cos(t), 0], [0, 0, 0, 1]]\n affine = torch.as_tensor(affine, device=torch.device(\"cpu:0\"), dtype=torch.float32)\n image = torch.arange(48.0).view(2, 1, 4, 2, 3).to(device=torch.device(\"cpu:0\"))\n xform = AffineTransform((3, 4, 2), padding_mode=\"border\", align_corners=False, mode=\"bilinear\")\n out = xform(image, affine)\n out = out.detach().cpu().numpy()\n expected = [\n [\n [\n [[0.00000006, 0.5000001], [2.3660254, 1.3660254], [4.732051, 2.4019241], [5.0, 3.9019237]],\n [[6.0, 6.5], [8.366026, 7.3660254], [10.732051, 8.401924], [11.0, 9.901924]],\n [[12.0, 12.5], [14.366026, 13.366025], [16.732052, 14.401924], [17.0, 15.901923]],\n ]\n ],\n [\n [\n [[24.0, 24.5], 
[26.366024, 25.366024], [28.732052, 26.401924], [29.0, 27.901924]],\n [[30.0, 30.5], [32.366028, 31.366026], [34.732048, 32.401924], [35.0, 33.901924]],\n [[36.0, 36.5], [38.366024, 37.366024], [40.73205, 38.401924], [41.0, 39.901924]],\n ]\n ],\n ]\n np.testing.assert_allclose(out, expected, atol=1e-4)\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_transform_3d.if_torch_cuda_is_availabl_TestAffineTransform.test_affine_transform_3d.if_torch_cuda_is_availabl.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_affine_transform_3d.if_torch_cuda_is_availabl_TestAffineTransform.test_affine_transform_3d.if_torch_cuda_is_availabl.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 236, "end_line": 258, "span_ids": ["TestAffineTransform.test_affine_transform_3d"], "tokens": 460}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n\n def test_affine_transform_3d(self):\n # ... 
other code\n\n if torch.cuda.is_available():\n affine = torch.as_tensor(affine, device=torch.device(\"cuda:0\"), dtype=torch.float32)\n image = torch.arange(48.0).view(2, 1, 4, 2, 3).to(device=torch.device(\"cuda:0\"))\n xform = AffineTransform(padding_mode=\"border\", align_corners=False, mode=\"bilinear\")\n out = xform(image, affine, (3, 4, 2))\n out = out.detach().cpu().numpy()\n expected = [\n [\n [\n [[0.00000006, 0.5000001], [2.3660254, 1.3660254], [4.732051, 2.4019241], [5.0, 3.9019237]],\n [[6.0, 6.5], [8.366026, 7.3660254], [10.732051, 8.401924], [11.0, 9.901924]],\n [[12.0, 12.5], [14.366026, 13.366025], [16.732052, 14.401924], [17.0, 15.901923]],\n ]\n ],\n [\n [\n [[24.0, 24.5], [26.366024, 25.366024], [28.732052, 26.401924], [29.0, 27.901924]],\n [[30.0, 30.5], [32.366028, 31.366026], [34.732048, 32.401924], [35.0, 33.901924]],\n [[36.0, 36.5], [38.366024, 37.366024], [40.73205, 38.401924], [41.0, 39.901924]],\n ]\n ],\n ]\n np.testing.assert_allclose(out, expected, atol=1e-4)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_ill_affine_transform_TestAffineTransform.test_ill_affine_transform.None_3.xform_image_affine_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_ill_affine_transform_TestAffineTransform.test_ill_affine_transform.None_3.xform_image_affine_", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 260, "end_line": 293, "span_ids": ["TestAffineTransform.test_ill_affine_transform"], "tokens": 720}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n\n def test_ill_affine_transform(self):\n with self.assertRaises(ValueError): # image too small\n t = np.pi / 3\n affine = [[1, 0, 0, 0], [0.0, np.cos(t), -np.sin(t), 0], [0, np.sin(t), np.cos(t), 0], [0, 0, 0, 1]]\n affine = torch.as_tensor(affine, device=torch.device(\"cpu:0\"), dtype=torch.float32)\n xform = AffineTransform((3, 4, 2), padding_mode=\"border\", align_corners=False, mode=\"bilinear\")\n xform(torch.as_tensor([1.0, 2.0, 3.0]), affine)\n\n with self.assertRaises(ValueError): # output shape too small\n t = np.pi / 3\n affine = [[1, 0, 0, 0], [0.0, np.cos(t), -np.sin(t), 0], [0, np.sin(t), np.cos(t), 0], [0, 0, 0, 1]]\n affine = torch.as_tensor(affine, device=torch.device(\"cpu:0\"), dtype=torch.float32)\n image = torch.arange(48).view(2, 1, 4, 2, 3).to(device=torch.device(\"cpu:0\"))\n xform = AffineTransform((3, 4), padding_mode=\"border\", align_corners=False, mode=\"bilinear\")\n xform(image, affine)\n\n with self.assertRaises(ValueError): # incorrect affine\n t = np.pi / 3\n affine = [[1, 0, 0, 0], [0.0, np.cos(t), -np.sin(t), 0], [0, np.sin(t), np.cos(t), 0], [0, 0, 0, 1]]\n affine = torch.as_tensor(affine, 
device=torch.device(\"cpu:0\"), dtype=torch.float32)\n affine = affine.unsqueeze(0).unsqueeze(0)\n image = torch.arange(48).view(2, 1, 4, 2, 3).to(device=torch.device(\"cpu:0\"))\n xform = AffineTransform((2, 3, 4), padding_mode=\"border\", align_corners=False, mode=\"bilinear\")\n xform(image, affine)\n\n with self.assertRaises(ValueError): # batch doesn't match\n t = np.pi / 3\n affine = [[1, 0, 0, 0], [0.0, np.cos(t), -np.sin(t), 0], [0, np.sin(t), np.cos(t), 0], [0, 0, 0, 1]]\n affine = torch.as_tensor(affine, device=torch.device(\"cpu:0\"), dtype=torch.float32)\n affine = affine.unsqueeze(0)\n affine = affine.repeat(3, 1, 1)\n image = torch.arange(48).view(2, 1, 4, 2, 3).to(device=torch.device(\"cpu:0\"))\n xform = AffineTransform((2, 3, 4), padding_mode=\"border\", align_corners=False, mode=\"bilinear\")\n xform(image, affine)\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_ill_affine_transform.with_self_assertRaises_Ru_TestAffineTransform.test_ill_affine_transform.None_6.out.AffineTransform_1_2_i": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_ill_affine_transform.with_self_assertRaises_Ru_TestAffineTransform.test_ill_affine_transform.None_6.out.AffineTransform_1_2_i", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 295, "end_line": 314, "span_ids": ["TestAffineTransform.test_ill_affine_transform"], "tokens": 439}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n\n def test_ill_affine_transform(self):\n # ... 
other code\n\n with self.assertRaises(RuntimeError): # input grid dtypes different\n t = np.pi / 3\n affine = [[1, 0, 0, 0], [0.0, np.cos(t), -np.sin(t), 0], [0, np.sin(t), np.cos(t), 0], [0, 0, 0, 1]]\n affine = torch.as_tensor(affine, device=torch.device(\"cpu:0\"), dtype=torch.float32)\n affine = affine.unsqueeze(0)\n affine = affine.repeat(2, 1, 1)\n image = torch.arange(48).view(2, 1, 4, 2, 3).to(device=torch.device(\"cpu:0\"), dtype=torch.int32)\n xform = AffineTransform((2, 3, 4), padding_mode=\"border\", mode=\"bilinear\", normalized=True)\n xform(image, affine)\n\n with self.assertRaises(ValueError): # wrong affine\n affine = torch.as_tensor([[1, 0, 0, 0], [0, 0, 0, 1]])\n image = torch.arange(48).view(2, 1, 4, 2, 3).to(device=torch.device(\"cpu:0\"))\n xform = AffineTransform((2, 3, 4), padding_mode=\"border\", align_corners=False, mode=\"bilinear\")\n xform(image, affine)\n\n with self.assertRaises(RuntimeError): # dtype doesn't match\n affine = torch.as_tensor([[2.0, 0.0, 0.0], [0.0, 2.0, 0.0]], dtype=torch.float64)\n image = torch.arange(1.0, 13.0).view(1, 1, 3, 4).to(device=torch.device(\"cpu:0\"))\n out = AffineTransform((1, 2))(image, affine)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_forward_2d_TestAffineTransform.test_forward_2d.None_5": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_forward_2d_TestAffineTransform.test_forward_2d.None_5", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 316, "end_line": 338, "span_ids": ["TestAffineTransform.test_forward_2d"], "tokens": 328}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n\n def test_forward_2d(self):\n x = torch.rand(2, 1, 4, 4)\n theta = torch.Tensor([[[0, -1, 0], [1, 0, 0]]]).repeat(2, 1, 1)\n grid = torch.nn.functional.affine_grid(theta, x.size(), align_corners=False)\n expected = torch.nn.functional.grid_sample(x, grid, align_corners=False)\n expected = expected.detach().cpu().numpy()\n\n actual = AffineTransform(normalized=True, reverse_indexing=False)(x, theta)\n actual = actual.detach().cpu().numpy()\n np.testing.assert_allclose(actual, expected)\n np.testing.assert_allclose(list(theta.shape), [2, 2, 3])\n\n theta = torch.Tensor([[0, -1, 0], [1, 0, 0]])\n actual = AffineTransform(normalized=True, reverse_indexing=False)(x, theta)\n actual = actual.detach().cpu().numpy()\n np.testing.assert_allclose(actual, expected)\n np.testing.assert_allclose(list(theta.shape), [2, 3])\n\n theta = torch.Tensor([[[0, -1, 0], [1, 0, 0]]])\n actual = AffineTransform(normalized=True, reverse_indexing=False)(x, theta)\n actual = actual.detach().cpu().numpy()\n np.testing.assert_allclose(actual, expected)\n np.testing.assert_allclose(list(theta.shape), 
[1, 2, 3])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_forward_3d_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestAffineTransform.test_forward_3d_", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 340, "end_line": 367, "span_ids": ["TestAffineTransform.test_forward_3d", "impl:7"], "tokens": 397}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAffineTransform(unittest.TestCase):\n\n def test_forward_3d(self):\n x = torch.rand(2, 1, 4, 4, 4)\n theta = torch.Tensor([[[0, 0, -1, 0], [1, 0, 0, 0], [0, 0, 1, 0]]]).repeat(2, 1, 1)\n grid = torch.nn.functional.affine_grid(theta, x.size(), align_corners=False)\n expected = torch.nn.functional.grid_sample(x, grid, align_corners=False)\n expected = expected.detach().cpu().numpy()\n\n actual = AffineTransform(normalized=True, reverse_indexing=False)(x, theta)\n actual = actual.detach().cpu().numpy()\n np.testing.assert_allclose(actual, expected)\n np.testing.assert_allclose(list(theta.shape), [2, 3, 4])\n\n theta = torch.Tensor([[0, 0, -1, 0], [1, 0, 0, 0], [0, 0, 1, 0]])\n actual = AffineTransform(normalized=True, reverse_indexing=False)(x, theta)\n actual = actual.detach().cpu().numpy()\n np.testing.assert_allclose(actual, expected)\n np.testing.assert_allclose(list(theta.shape), [3, 4])\n\n theta = torch.Tensor([[[0, 0, -1, 0], [1, 0, 0, 0], [0, 0, 1, 0]]])\n actual = AffineTransform(normalized=True, reverse_indexing=False)(x, theta)\n actual = actual.detach().cpu().numpy()\n np.testing.assert_allclose(actual, expected)\n np.testing.assert_allclose(list(theta.shape), [1, 3, 4])\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_first.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_first.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_as_channel_first.py", "file_name": "test_as_channel_first.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 36, "span_ids": ["TestAsChannelFirst.test_shape", "TestAsChannelFirst", "impl:7", "docstring"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import AsChannelFirst\n\nTEST_CASE_1 = [{\"channel_dim\": -1}, (4, 1, 2, 3)]\n\nTEST_CASE_2 = [{\"channel_dim\": 3}, (4, 1, 2, 3)]\n\nTEST_CASE_3 = [{\"channel_dim\": 2}, (3, 1, 2, 4)]\n\n\nclass TestAsChannelFirst(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_shape(self, input_param, expected_shape):\n test_data = np.random.randint(0, 2, size=[1, 2, 3, 4])\n result = AsChannelFirst(**input_param)(test_data)\n self.assertTupleEqual(result.shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_firstd.py_unittest_TEST_CASE_3._keys_image_labe": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_firstd.py_unittest_TEST_CASE_3._keys_image_labe", "embedding": null, "metadata": {"file_path": "tests/test_as_channel_firstd.py", "file_name": "test_as_channel_firstd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 23, "span_ids": ["docstring"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import AsChannelFirstd\n\nTEST_CASE_1 = [{\"keys\": [\"image\", \"label\", \"extra\"], \"channel_dim\": -1}, (4, 1, 2, 3)]\n\nTEST_CASE_2 = [{\"keys\": [\"image\", \"label\", \"extra\"], \"channel_dim\": 3}, (4, 1, 2, 3)]\n\nTEST_CASE_3 = [{\"keys\": [\"image\", \"label\", \"extra\"], \"channel_dim\": 2}, (3, 1, 2, 4)]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_firstd.py_TestAsChannelFirstd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_firstd.py_TestAsChannelFirstd_", "embedding": null, "metadata": {"file_path": "tests/test_as_channel_firstd.py", "file_name": "test_as_channel_firstd.py", "file_type": "text/x-python", "category": "test", "start_line": 24, "end_line": 40, "span_ids": ["TestAsChannelFirstd", "TestAsChannelFirstd.test_shape", "impl:7"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAsChannelFirstd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_shape(self, 
input_param, expected_shape):\n test_data = {\n \"image\": np.random.randint(0, 2, size=[1, 2, 3, 4]),\n \"label\": np.random.randint(0, 2, size=[1, 2, 3, 4]),\n \"extra\": np.random.randint(0, 2, size=[1, 2, 3, 4]),\n }\n result = AsChannelFirstd(**input_param)(test_data)\n self.assertTupleEqual(result[\"image\"].shape, expected_shape)\n self.assertTupleEqual(result[\"label\"].shape, expected_shape)\n self.assertTupleEqual(result[\"extra\"].shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_last.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_last.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_as_channel_last.py", "file_name": "test_as_channel_last.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 36, "span_ids": ["TestAsChannelLast", "TestAsChannelLast.test_shape", "impl:7", "docstring"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import AsChannelLast\n\nTEST_CASE_1 = [{\"channel_dim\": 0}, (2, 3, 4, 1)]\n\nTEST_CASE_2 = [{\"channel_dim\": 1}, (1, 3, 4, 2)]\n\nTEST_CASE_3 = [{\"channel_dim\": 3}, (1, 2, 3, 4)]\n\n\nclass TestAsChannelLast(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_shape(self, input_param, expected_shape):\n test_data = np.random.randint(0, 2, size=[1, 2, 3, 4])\n result = AsChannelLast(**input_param)(test_data)\n self.assertTupleEqual(result.shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_lastd.py_unittest_TEST_CASE_3._keys_image_labe": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_lastd.py_unittest_TEST_CASE_3._keys_image_labe", "embedding": null, "metadata": {"file_path": "tests/test_as_channel_lastd.py", "file_name": "test_as_channel_lastd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 23, "span_ids": ["docstring"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom 
monai.transforms import AsChannelLastd\n\nTEST_CASE_1 = [{\"keys\": [\"image\", \"label\", \"extra\"], \"channel_dim\": 0}, (2, 3, 4, 1)]\n\nTEST_CASE_2 = [{\"keys\": [\"image\", \"label\", \"extra\"], \"channel_dim\": 1}, (1, 3, 4, 2)]\n\nTEST_CASE_3 = [{\"keys\": [\"image\", \"label\", \"extra\"], \"channel_dim\": 3}, (1, 2, 3, 4)]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_lastd.py_TestAsChannelLastd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_channel_lastd.py_TestAsChannelLastd_", "embedding": null, "metadata": {"file_path": "tests/test_as_channel_lastd.py", "file_name": "test_as_channel_lastd.py", "file_type": "text/x-python", "category": "test", "start_line": 24, "end_line": 40, "span_ids": ["TestAsChannelLastd.test_shape", "TestAsChannelLastd", "impl:7"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAsChannelLastd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_shape(self, input_param, expected_shape):\n test_data = {\n \"image\": np.random.randint(0, 2, size=[1, 2, 3, 4]),\n \"label\": np.random.randint(0, 2, size=[1, 2, 3, 4]),\n \"extra\": np.random.randint(0, 2, size=[1, 2, 3, 4]),\n }\n result = AsChannelLastd(**input_param)(test_data)\n self.assertTupleEqual(result[\"image\"].shape, expected_shape)\n self.assertTupleEqual(result[\"label\"].shape, expected_shape)\n self.assertTupleEqual(result[\"extra\"].shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_discrete.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_discrete.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_as_discrete.py", "file_name": "test_as_discrete.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 51, "span_ids": ["TestAsDiscrete", "TestAsDiscrete.test_value_shape", "impl:7", "docstring"], "tokens": 434}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import AsDiscrete\n\nTEST_CASE_1 = [\n {\"argmax\": True, \"to_onehot\": False, \"n_classes\": None, \"threshold_values\": False, \"logit_thresh\": 0.5},\n torch.tensor([[[[0.0, 1.0]], [[2.0, 
3.0]]]]),\n torch.tensor([[[[1.0, 1.0]]]]),\n (1, 1, 1, 2),\n]\n\nTEST_CASE_2 = [\n {\"argmax\": True, \"to_onehot\": True, \"n_classes\": 2, \"threshold_values\": False, \"logit_thresh\": 0.5},\n torch.tensor([[[[0.0, 1.0]], [[2.0, 3.0]]]]),\n torch.tensor([[[[0.0, 0.0]], [[1.0, 1.0]]]]),\n (1, 2, 1, 2),\n]\n\nTEST_CASE_3 = [\n {\"argmax\": False, \"to_onehot\": False, \"n_classes\": None, \"threshold_values\": True, \"logit_thresh\": 0.6},\n torch.tensor([[[[0.0, 1.0], [2.0, 3.0]]]]),\n torch.tensor([[[[0.0, 1.0], [1.0, 1.0]]]]),\n (1, 1, 2, 2),\n]\n\n\nclass TestAsDiscrete(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_value_shape(self, input_param, img, out, expected_shape):\n result = AsDiscrete(**input_param)(img)\n torch.testing.assert_allclose(result, out)\n self.assertTupleEqual(result.shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_discreted.py_unittest_TEST_CASE_3._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_discreted.py_unittest_TEST_CASE_3._", "embedding": null, "metadata": {"file_path": "tests/test_as_discreted.py", "file_name": "test_as_discreted.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 59, "span_ids": ["impl:5", "docstring"], "tokens": 524}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import AsDiscreted\n\nTEST_CASE_1 = [\n {\n \"keys\": [\"pred\", \"label\"],\n \"argmax\": [True, False],\n \"to_onehot\": True,\n \"n_classes\": 2,\n \"threshold_values\": False,\n \"logit_thresh\": 0.5,\n },\n {\"pred\": torch.tensor([[[[0.0, 1.0]], [[2.0, 3.0]]]]), \"label\": torch.tensor([[[[0, 1]]]])},\n {\"pred\": torch.tensor([[[[0.0, 0.0]], [[1.0, 1.0]]]]), \"label\": torch.tensor([[[[1.0, 0.0]], [[0.0, 1.0]]]])},\n (1, 2, 1, 2),\n]\n\nTEST_CASE_2 = [\n {\n \"keys\": [\"pred\", \"label\"],\n \"argmax\": False,\n \"to_onehot\": False,\n \"n_classes\": None,\n \"threshold_values\": [True, False],\n \"logit_thresh\": 0.6,\n },\n {\"pred\": torch.tensor([[[[0.0, 1.0], [2.0, 3.0]]]]), \"label\": torch.tensor([[[[0, 1], [1, 1]]]])},\n {\"pred\": torch.tensor([[[[0.0, 1.0], [1.0, 1.0]]]]), \"label\": torch.tensor([[[[0.0, 1.0], [1.0, 1.0]]]])},\n (1, 1, 2, 2),\n]\n\nTEST_CASE_3 = [\n {\n \"keys\": [\"pred\"],\n \"argmax\": True,\n \"to_onehot\": True,\n \"n_classes\": 2,\n \"threshold_values\": False,\n \"logit_thresh\": 0.5,\n },\n {\"pred\": torch.tensor([[[[0.0, 1.0]], [[2.0, 3.0]]]])},\n {\"pred\": torch.tensor([[[[0.0, 0.0]], [[1.0, 1.0]]]])},\n (1, 2, 1, 2),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_discreted.py_TestAsDiscreted_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_as_discreted.py_TestAsDiscreted_", "embedding": null, "metadata": {"file_path": "tests/test_as_discreted.py", "file_name": "test_as_discreted.py", "file_type": "text/x-python", "category": "test", "start_line": 60, "end_line": 73, "span_ids": ["TestAsDiscreted.test_value_shape", "impl:7", "TestAsDiscreted"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestAsDiscreted(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_value_shape(self, input_param, test_input, output, expected_shape):\n result = AsDiscreted(**input_param)(test_input)\n torch.testing.assert_allclose(result[\"pred\"], output[\"pred\"])\n self.assertTupleEqual(result[\"pred\"].shape, expected_shape)\n if \"label\" in result:\n torch.testing.assert_allclose(result[\"label\"], output[\"label\"])\n self.assertTupleEqual(result[\"label\"].shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_border_pad.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_border_pad.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_border_pad.py", "file_name": "test_border_pad.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 57, "span_ids": ["impl:9", "TestBorderPad.test_pad_shape", "TestBorderPad", "docstring"], "tokens": 395}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import BorderPad\nfrom monai.utils import NumpyPadMode\n\nTEST_CASE_1 = [\n {\"spatial_border\": 2, \"mode\": \"constant\"},\n np.zeros((3, 8, 8, 4)),\n np.zeros((3, 12, 12, 8)),\n]\n\nTEST_CASE_2 = [\n {\"spatial_border\": [1, 2, 3], \"mode\": \"constant\"},\n np.zeros((3, 8, 8, 4)),\n np.zeros((3, 10, 12, 10)),\n]\n\nTEST_CASE_3 = [\n {\"spatial_border\": [1, 2, 3, 4, 5, 6], \"mode\": \"constant\"},\n np.zeros((3, 8, 8, 4)),\n np.zeros((3, 11, 15, 15)),\n]\n\nTEST_CASE_4 = [\n {\"spatial_border\": [1, 2, 3, 4, 5, 6], \"mode\": NumpyPadMode.CONSTANT},\n np.zeros((3, 8, 8, 4)),\n np.zeros((3, 11, 15, 15)),\n]\n\n\nclass TestBorderPad(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4])\n def test_pad_shape(self, input_param, input_data, expected_val):\n padder = 
BorderPad(**input_param)\n result = padder(input_data)\n self.assertAlmostEqual(result.shape, expected_val.shape)\n result = padder(input_data, mode=input_param[\"mode\"])\n self.assertAlmostEqual(result.shape, expected_val.shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_border_padd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_border_padd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_border_padd.py", "file_name": "test_border_padd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 61, "span_ids": ["TestBorderPadd.test_pad_shape", "impl:11", "TestBorderPadd", "docstring"], "tokens": 548}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import BorderPadd\nfrom monai.utils import NumpyPadMode\n\nTEST_CASE_1 = [\n {\"keys\": [\"img\", \"seg\"], \"spatial_border\": 2, \"mode\": [\"constant\", \"edge\"]},\n {\"img\": np.zeros((3, 8, 8, 4)), \"seg\": np.zeros((3, 8, 8, 4))},\n np.zeros((3, 12, 12, 8)),\n]\n\nTEST_CASE_2 = [\n {\"keys\": \"img\", \"spatial_border\": [1, 2, 3], \"mode\": \"constant\"},\n {\"img\": np.zeros((3, 8, 8, 4))},\n np.zeros((3, 10, 12, 10)),\n]\n\nTEST_CASE_3 = [\n {\"keys\": \"img\", \"spatial_border\": [1, 2, 3, 4, 5, 6], \"mode\": \"constant\"},\n {\"img\": np.zeros((3, 8, 8, 4))},\n np.zeros((3, 11, 15, 15)),\n]\n\nTEST_CASE_4 = [\n {\"keys\": [\"img\", \"seg\"], \"spatial_border\": 2, \"mode\": [\"constant\", NumpyPadMode.EDGE]},\n {\"img\": np.zeros((3, 8, 8, 4)), \"seg\": np.zeros((3, 8, 8, 4))},\n np.zeros((3, 12, 12, 8)),\n]\n\nTEST_CASE_5 = [\n {\"keys\": [\"img\", \"seg\"], \"spatial_border\": 2, \"mode\": [NumpyPadMode.CONSTANT, NumpyPadMode.EDGE]},\n {\"img\": np.zeros((3, 8, 8, 4)), \"seg\": np.zeros((3, 8, 8, 4))},\n np.zeros((3, 12, 12, 8)),\n]\n\n\nclass TestBorderPadd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5])\n def test_pad_shape(self, input_param, input_data, expected_val):\n padder = BorderPadd(**input_param)\n result = padder(input_data)\n self.assertAlmostEqual(result[\"img\"].shape, expected_val.shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cachedataset_parallel.py_TestCacheDatasetParallel_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cachedataset_parallel.py_TestCacheDatasetParallel_", "embedding": null, "metadata": {"file_path": 
"tests/test_cachedataset_parallel.py", "file_name": "test_cachedataset_parallel.py", "file_type": "text/x-python", "category": "test", "start_line": 30, "end_line": 59, "span_ids": ["TestCacheDatasetParallel", "impl:7", "TestCacheDatasetParallel.test_shape"], "tokens": 283}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCacheDatasetParallel(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_shape(self, num_workers, dataset_size, transform):\n test_image = nib.Nifti1Image(np.random.randint(0, 2, size=[8, 8, 8]), np.eye(4))\n with tempfile.TemporaryDirectory() as tempdir:\n nib.save(test_image, os.path.join(tempdir, \"test_image1.nii.gz\"))\n nib.save(test_image, os.path.join(tempdir, \"test_label1.nii.gz\"))\n nib.save(test_image, os.path.join(tempdir, \"test_extra1.nii.gz\"))\n test_data = [\n {\n \"image\": os.path.join(tempdir, \"test_image1.nii.gz\"),\n \"label\": os.path.join(tempdir, \"test_label1.nii.gz\"),\n \"extra\": os.path.join(tempdir, \"test_extra1.nii.gz\"),\n }\n ] * dataset_size\n dataset = CacheDataset(\n data=test_data,\n transform=transform,\n cache_rate=1,\n num_workers=num_workers,\n )\n\n self.assertEqual(len(dataset._cache), dataset.cache_num)\n for i in range(dataset.cache_num):\n self.assertIsNotNone(dataset._cache[i])\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cast_to_type.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cast_to_type.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_cast_to_type.py", "file_name": "test_cast_to_type.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 34, "span_ids": ["TestCastToType.test_type", "TestCastToType", "impl:5", "docstring"], "tokens": 170}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import CastToType\n\nTEST_CASE_1 = [{\"dtype\": np.float64}, np.array([[0, 1], [1, 2]], dtype=np.float32), np.float64]\n\nTEST_CASE_2 = [{\"dtype\": torch.float64}, torch.tensor([[0, 1], [1, 2]], dtype=torch.float32), torch.float64]\n\n\nclass TestCastToType(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2])\n def test_type(self, input_param, input_data, expected_type):\n result = CastToType(**input_param)(input_data)\n self.assertEqual(result.dtype, expected_type)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": 
"{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cast_to_typed.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cast_to_typed.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_cast_to_typed.py", "file_name": "test_cast_to_typed.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 46, "span_ids": ["TestCastToTyped", "TestCastToTyped.test_type", "impl:5", "docstring"], "tokens": 277}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import CastToTyped\n\nTEST_CASE_1 = [\n {\"keys\": [\"img\"], \"dtype\": np.float64},\n {\"img\": np.array([[0, 1], [1, 2]], dtype=np.float32), \"seg\": np.array([[0, 1], [1, 2]], dtype=np.int8)},\n {\"img\": np.float64, \"seg\": np.int8},\n]\n\nTEST_CASE_2 = [\n {\"keys\": [\"img\"], \"dtype\": torch.float64},\n {\n \"img\": torch.tensor([[0, 1], [1, 2]], dtype=torch.float32),\n \"seg\": torch.tensor([[0, 1], [1, 2]], dtype=torch.int8),\n },\n {\"img\": torch.float64, \"seg\": torch.int8},\n]\n\n\nclass TestCastToTyped(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2])\n def test_type(self, input_param, input_data, expected_type):\n result = CastToTyped(**input_param)(input_data)\n for k, v in result.items():\n self.assertEqual(v.dtype, expected_type[k])\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_center_spatial_crop.py_TestCenterSpatialCrop_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_center_spatial_crop.py_TestCenterSpatialCrop_", "embedding": null, "metadata": {"file_path": "tests/test_center_spatial_crop.py", "file_name": "test_center_spatial_crop.py", "file_type": "text/x-python", "category": "test", "start_line": 37, "end_line": 51, "span_ids": ["impl:9", "TestCenterSpatialCrop.test_shape", "TestCenterSpatialCrop", "TestCenterSpatialCrop.test_value"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCenterSpatialCrop(unittest.TestCase):\n @parameterized.expand([TEST_CASE_0, TEST_CASE_1, TEST_CASE_3])\n def test_shape(self, input_param, input_data, expected_shape):\n result = CenterSpatialCrop(**input_param)(input_data)\n np.testing.assert_allclose(result.shape, expected_shape)\n\n @parameterized.expand([TEST_CASE_2])\n def test_value(self, input_param, input_data, 
expected_value):\n result = CenterSpatialCrop(**input_param)(input_data)\n np.testing.assert_allclose(result, expected_value)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_center_spatial_cropd.py_unittest_TEST_CASE_2._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_center_spatial_cropd.py_unittest_TEST_CASE_2._", "embedding": null, "metadata": {"file_path": "tests/test_center_spatial_cropd.py", "file_name": "test_center_spatial_cropd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 35, "span_ids": ["docstring"], "tokens": 284}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import CenterSpatialCropd\n\nTEST_CASE_0 = [\n {\"keys\": \"img\", \"roi_size\": [2, -1, -1]},\n {\"img\": np.random.randint(0, 2, size=[3, 3, 3, 3])},\n (3, 2, 3, 3),\n]\n\nTEST_CASE_1 = [\n {\"keys\": \"img\", \"roi_size\": [2, 2, 2]},\n {\"img\": np.random.randint(0, 2, size=[3, 3, 3, 3])},\n (3, 2, 2, 2),\n]\n\nTEST_CASE_2 = [\n {\"keys\": \"img\", \"roi_size\": [2, 2]},\n {\"img\": np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 1, 2, 1, 0], [0, 0, 0, 0, 0]]])},\n np.array([[[1, 2], [2, 3]]]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_center_spatial_cropd.py_TestCenterSpatialCropd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_center_spatial_cropd.py_TestCenterSpatialCropd_", "embedding": null, "metadata": {"file_path": "tests/test_center_spatial_cropd.py", "file_name": "test_center_spatial_cropd.py", "file_type": "text/x-python", "category": "test", "start_line": 36, "end_line": 50, "span_ids": ["TestCenterSpatialCropd", "TestCenterSpatialCropd.test_value", "impl:7", "TestCenterSpatialCropd.test_shape"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCenterSpatialCropd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_0, TEST_CASE_1])\n def test_shape(self, input_param, input_data, expected_shape):\n result = CenterSpatialCropd(**input_param)(input_data)\n self.assertTupleEqual(result[\"img\"].shape, expected_shape)\n\n @parameterized.expand([TEST_CASE_2])\n def test_value(self, input_param, input_data, expected_value):\n result = 
CenterSpatialCropd(**input_param)(input_data)\n np.testing.assert_allclose(result[\"img\"], expected_value)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_TestCompose.test_list_dict_compose_TestCompose.test_list_dict_compose.for_item_in_value_.self_assertDictEqual_item": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_TestCompose.test_list_dict_compose_TestCompose.test_list_dict_compose.for_item_in_value_.self_assertDictEqual_item", "embedding": null, "metadata": {"file_path": "tests/test_compose.py", "file_name": "test_compose.py", "file_type": "text/x-python", "category": "test", "start_line": 47, "end_line": 67, "span_ids": ["TestCompose.test_list_dict_compose"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCompose(unittest.TestCase):\n\n def test_list_dict_compose(self):\n def a(d): # transform to handle dict data\n d = dict(d)\n d[\"a\"] += 1\n return d\n\n def b(d): # transform to generate a batch list of data\n d = dict(d)\n d[\"b\"] += 1\n d = [d] * 5\n return d\n\n def c(d): # transform to handle dict data\n d = dict(d)\n d[\"c\"] += 1\n return d\n\n transforms = Compose([a, a, b, c, c])\n value = transforms({\"a\": 0, \"b\": 0, \"c\": 0})\n for item in value:\n self.assertDictEqual(item, {\"a\": 2, \"b\": 1, \"c\": 2})", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_meandice.py_TestComputeMeanDice_TestComputeMeanDice._DiceMetric_class_tests": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_meandice.py_TestComputeMeanDice_TestComputeMeanDice._DiceMetric_class_tests", "embedding": null, "metadata": {"file_path": "tests/test_compute_meandice.py", "file_name": "test_compute_meandice.py", "file_type": "text/x-python", "category": "test", "start_line": 171, "end_line": 182, "span_ids": ["TestComputeMeanDice.test_value", "TestComputeMeanDice", "TestComputeMeanDice.test_nans"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestComputeMeanDice(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_9])\n def test_value(self, input_data, expected_value):\n result = compute_meandice(**input_data)\n np.testing.assert_allclose(result.cpu().numpy(), expected_value, atol=1e-4)\n\n 
@parameterized.expand([TEST_CASE_3])\n def test_nans(self, input_data, expected_value):\n result = compute_meandice(**input_data)\n self.assertTrue(np.allclose(np.isnan(result.cpu().numpy()), expected_value))\n\n # DiceMetric class tests", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_meandice.py_TestComputeMeanDice.test_value_class_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_meandice.py_TestComputeMeanDice.test_value_class_", "embedding": null, "metadata": {"file_path": "tests/test_compute_meandice.py", "file_name": "test_compute_meandice.py", "file_type": "text/x-python", "category": "test", "start_line": 183, "end_line": 204, "span_ids": ["TestComputeMeanDice.test_value_class", "impl:19", "TestComputeMeanDice.test_nans_class"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestComputeMeanDice(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2])\n def test_value_class(self, input_data, expected_value):\n\n # same test as for compute_meandice\n vals = {}\n vals[\"y_pred\"] = input_data.pop(\"y_pred\")\n vals[\"y\"] = input_data.pop(\"y\")\n dice_metric = DiceMetric(**input_data, reduction=\"none\")\n result, _ = dice_metric(**vals)\n np.testing.assert_allclose(result.cpu().numpy(), expected_value, atol=1e-4)\n\n @parameterized.expand([TEST_CASE_4, TEST_CASE_5, TEST_CASE_6, TEST_CASE_7, TEST_CASE_8])\n def test_nans_class(self, params, input_data, expected_value):\n\n dice_metric = DiceMetric(**params)\n result, _ = dice_metric(**input_data)\n np.testing.assert_allclose(result.cpu().numpy(), expected_value, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_concat_itemsd.py_unittest_TestConcatItemsd.test_tensor_values.None_2": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_concat_itemsd.py_unittest_TestConcatItemsd.test_tensor_values.None_2", "embedding": null, "metadata": {"file_path": "tests/test_concat_itemsd.py", "file_name": "test_concat_itemsd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 31, "span_ids": ["TestConcatItemsd.test_tensor_values", "TestConcatItemsd", "docstring"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\n\nfrom 
monai.transforms import ConcatItemsd\n\n\nclass TestConcatItemsd(unittest.TestCase):\n def test_tensor_values(self):\n device = torch.device(\"cuda:0\") if torch.cuda.is_available() else torch.device(\"cpu:0\")\n input_data = {\n \"img1\": torch.tensor([[0, 1], [1, 2]], device=device),\n \"img2\": torch.tensor([[0, 1], [1, 2]], device=device),\n }\n result = ConcatItemsd(keys=[\"img1\", \"img2\"], name=\"cat_img\")(input_data)\n self.assertTrue(\"cat_img\" in result)\n result[\"cat_img\"] += 1\n torch.testing.assert_allclose(result[\"img1\"], torch.tensor([[0, 1], [1, 2]], device=device))\n torch.testing.assert_allclose(result[\"cat_img\"], torch.tensor([[1, 2], [2, 3], [1, 2], [2, 3]], device=device))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_TestResidualUnit2D_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_TestResidualUnit2D_", "embedding": null, "metadata": {"file_path": "tests/test_convolutions.py", "file_name": "test_convolutions.py", "file_type": "text/x-python", "category": "test", "start_line": 126, "end_line": 155, "span_ids": ["TestResidualUnit2D.test_conv_only1", "impl", "TestResidualUnit2D", "TestResidualUnit2D.test_stride1", "TestResidualUnit2D.test_dropout1", "TestResidualUnit2D.test_dilation1"], "tokens": 322}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestResidualUnit2D(TorchImageTestCase2D):\n def test_conv_only1(self):\n conv = ResidualUnit(2, 1, self.output_channels)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[0], self.im_shape[1])\n self.assertEqual(out.shape, expected_shape)\n\n def test_stride1(self):\n for strides in [2, [2, 2], (2, 2)]:\n conv = ResidualUnit(2, 1, self.output_channels, strides=strides)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[0] // 2, self.im_shape[1] // 2)\n self.assertEqual(out.shape, expected_shape)\n\n def test_dilation1(self):\n conv = ResidualUnit(2, 1, self.output_channels, dilation=3)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[0], self.im_shape[1])\n self.assertEqual(out.shape, expected_shape)\n\n def test_dropout1(self):\n conv = ResidualUnit(2, 1, self.output_channels, dropout=0.15)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[0], self.im_shape[1])\n self.assertEqual(out.shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_copy_itemsd.py_unittest_TEST_CASE_4._img_seg_2_img": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_copy_itemsd.py_unittest_TEST_CASE_4._img_seg_2_img", "embedding": null, "metadata": {"file_path": "tests/test_copy_itemsd.py", "file_name": "test_copy_itemsd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 28, "span_ids": ["docstring"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks import eval_mode\nfrom monai.transforms import CopyItemsd\nfrom monai.utils import ensure_tuple\n\nTEST_CASE_1 = [\"img\", 1, \"img_1\"]\n\nTEST_CASE_2 = [[\"img\", \"seg\"], 1, [\"img_1\", \"seg_1\"]]\n\nTEST_CASE_3 = [\"img\", 2, [\"img_1\", \"img_2\"]]\n\nTEST_CASE_4 = [[\"img\", \"seg\"], 2, [\"img_1\", \"seg_1\", \"img_2\", \"seg_2\"]]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_copy_itemsd.py_TestCopyItemsd_TestCopyItemsd.test_numpy_values.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_copy_itemsd.py_TestCopyItemsd_TestCopyItemsd.test_numpy_values.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_copy_itemsd.py", "file_name": "test_copy_itemsd.py", "file_type": "text/x-python", "category": "test", "start_line": 28, "end_line": 37, "span_ids": ["TestCopyItemsd", "TestCopyItemsd.test_numpy_values"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCopyItemsd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4])\n def test_numpy_values(self, keys, times, names):\n input_data = {\"img\": np.array([[0, 1], [1, 2]]), \"seg\": np.array([[0, 1], [1, 2]])}\n result = CopyItemsd(keys=keys, times=times, names=names)(input_data)\n for name in ensure_tuple(names):\n self.assertTrue(name in result)\n result[name] += 1\n np.testing.assert_allclose(result[name], np.array([[1, 2], [2, 3]]))\n np.testing.assert_allclose(result[\"img\"], np.array([[0, 1], [1, 2]]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_copy_itemsd.py_TestCopyItemsd.test_tensor_values_TestCopyItemsd.test_tensor_values.None_2": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_copy_itemsd.py_TestCopyItemsd.test_tensor_values_TestCopyItemsd.test_tensor_values.None_2", "embedding": null, "metadata": {"file_path": "tests/test_copy_itemsd.py", "file_name": "test_copy_itemsd.py", "file_type": "text/x-python", "category": "test", "start_line": 39, "end_line": 49, "span_ids": ["TestCopyItemsd.test_tensor_values"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCopyItemsd(unittest.TestCase):\n\n def test_tensor_values(self):\n device = torch.device(\"cuda:0\") if torch.cuda.is_available() else torch.device(\"cpu:0\")\n input_data = {\n \"img\": torch.tensor([[0, 1], [1, 2]], device=device),\n \"seg\": torch.tensor([[0, 1], [1, 2]], device=device),\n }\n result = CopyItemsd(keys=\"img\", times=1, names=\"img_1\")(input_data)\n self.assertTrue(\"img_1\" in result)\n result[\"img_1\"] += 1\n torch.testing.assert_allclose(result[\"img\"], torch.tensor([[0, 1], [1, 2]], device=device))\n torch.testing.assert_allclose(result[\"img_1\"], torch.tensor([[1, 2], [2, 3]], device=device))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_unittest_TestCreateGrid.test_create_grid.g_13.create_grid_2_2_2_sp": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_unittest_TestCreateGrid.test_create_grid.g_13.create_grid_2_2_2_sp", "embedding": null, "metadata": {"file_path": "tests/test_create_grid_and_affine.py", "file_name": "test_create_grid_and_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 69, "span_ids": ["TestCreateGrid", "TestCreateGrid.test_create_grid", "docstring"], "tokens": 685}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import (\n create_control_grid,\n create_grid,\n create_rotate,\n create_scale,\n create_shear,\n create_translate,\n)\n\n\nclass TestCreateGrid(unittest.TestCase):\n def test_create_grid(self):\n with self.assertRaisesRegex(TypeError, \"\"):\n create_grid(None)\n with self.assertRaisesRegex(TypeError, \"\"):\n create_grid((1, 1), spacing=2.0)\n with self.assertRaisesRegex(TypeError, \"\"):\n create_grid((1, 1), spacing=2.0)\n\n g = create_grid((1, 1))\n expected = np.array([[[0.0]], [[0.0]], [[1.0]]])\n np.testing.assert_allclose(g, expected)\n\n g = create_grid((1, 1), homogeneous=False)\n expected = np.array([[[0.0]], [[0.0]]])\n np.testing.assert_allclose(g, expected)\n\n g = create_grid((1, 1), spacing=(1.2, 1.3))\n expected = np.array([[[0.0]], [[0.0]], [[1.0]]])\n 
np.testing.assert_allclose(g, expected)\n\n g = create_grid((1, 1, 1), spacing=(1.2, 1.3, 1.0))\n expected = np.array([[[[0.0]]], [[[0.0]]], [[[0.0]]], [[[1.0]]]])\n np.testing.assert_allclose(g, expected)\n\n g = create_grid((1, 1, 1), spacing=(1.2, 1.3, 1.0), homogeneous=False)\n expected = np.array([[[[0.0]]], [[[0.0]]], [[[0.0]]]])\n np.testing.assert_allclose(g, expected)\n\n g = create_grid((1, 1, 1), spacing=(1.2, 1.3, 1.0), dtype=np.int32)\n np.testing.assert_equal(g.dtype, np.int32)\n\n g = create_grid((2, 2, 2))\n expected = np.array(\n [\n [[[-0.5, -0.5], [-0.5, -0.5]], [[0.5, 0.5], [0.5, 0.5]]],\n [[[-0.5, -0.5], [0.5, 0.5]], [[-0.5, -0.5], [0.5, 0.5]]],\n [[[-0.5, 0.5], [-0.5, 0.5]], [[-0.5, 0.5], [-0.5, 0.5]]],\n [[[1.0, 1.0], [1.0, 1.0]], [[1.0, 1.0], [1.0, 1.0]]],\n ]\n )\n np.testing.assert_allclose(g, expected)\n\n g = create_grid((2, 2, 2), spacing=(1.2, 1.3, 1.0))\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateGrid.test_create_grid.expected_14_TestCreateGrid.test_create_grid.None_7": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateGrid.test_create_grid.expected_14_TestCreateGrid.test_create_grid.None_7", "embedding": null, "metadata": {"file_path": "tests/test_create_grid_and_affine.py", "file_name": "test_create_grid_and_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 70, "end_line": 78, "span_ids": ["TestCreateGrid.test_create_grid"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCreateGrid(unittest.TestCase):\n def test_create_grid(self):\n # ... 
other code\n expected = np.array(\n [\n [[[-0.6, -0.6], [-0.6, -0.6]], [[0.6, 0.6], [0.6, 0.6]]],\n [[[-0.65, -0.65], [0.65, 0.65]], [[-0.65, -0.65], [0.65, 0.65]]],\n [[[-0.5, 0.5], [-0.5, 0.5]], [[-0.5, 0.5], [-0.5, 0.5]]],\n [[[1.0, 1.0], [1.0, 1.0]], [[1.0, 1.0], [1.0, 1.0]]],\n ]\n )\n np.testing.assert_allclose(g, expected)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateGrid.test_create_control_grid_TestCreateGrid.test_create_control_grid.g_6.create_control_grid_2_0_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateGrid.test_create_control_grid_TestCreateGrid.test_create_control_grid.g_6.create_control_grid_2_0_", "embedding": null, "metadata": {"file_path": "tests/test_create_grid_and_affine.py", "file_name": "test_create_grid_and_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 80, "end_line": 116, "span_ids": ["TestCreateGrid.test_create_control_grid"], "tokens": 756}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCreateGrid(unittest.TestCase):\n\n def test_create_control_grid(self):\n with self.assertRaisesRegex(TypeError, \"\"):\n create_control_grid(None, None)\n with self.assertRaisesRegex(TypeError, \"\"):\n create_control_grid((1, 1), 2.0)\n\n g = create_control_grid((1.0, 1.0), (1.0, 1.0))\n expected = np.array(\n [\n [[-1.0, -1.0, -1.0], [0.0, 0.0, 0.0], [1.0, 1.0, 1.0]],\n [[-1.0, 0.0, 1.0], [-1.0, 0.0, 1.0], [-1.0, 0.0, 1.0]],\n [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],\n ]\n )\n np.testing.assert_allclose(g, expected)\n\n g = create_control_grid((1.0, 1.0), (2.0, 2.0))\n expected = np.array(\n [\n [[-2.0, -2.0, -2.0], [0.0, 0.0, 0.0], [2.0, 2.0, 2.0]],\n [[-2.0, 0.0, 2.0], [-2.0, 0.0, 2.0], [-2.0, 0.0, 2.0]],\n [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],\n ]\n )\n np.testing.assert_allclose(g, expected)\n\n g = create_control_grid((2.0, 2.0), (1.0, 1.0))\n expected = np.array(\n [\n [[-1.5, -1.5, -1.5, -1.5], [-0.5, -0.5, -0.5, -0.5], [0.5, 0.5, 0.5, 0.5], [1.5, 1.5, 1.5, 1.5]],\n [[-1.5, -0.5, 0.5, 1.5], [-1.5, -0.5, 0.5, 1.5], [-1.5, -0.5, 0.5, 1.5], [-1.5, -0.5, 0.5, 1.5]],\n [[1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0]],\n ]\n )\n np.testing.assert_allclose(g, expected)\n\n g = create_control_grid((2.0, 2.0), (2.0, 2.0))\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateGrid.test_create_control_grid.expected_7_TestCreateGrid.test_create_control_grid.g_8.create_control_grid_1_0_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateGrid.test_create_control_grid.expected_7_TestCreateGrid.test_create_control_grid.g_8.create_control_grid_1_0_", "embedding": null, "metadata": {"file_path": "tests/test_create_grid_and_affine.py", "file_name": "test_create_grid_and_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 117, "end_line": 126, "span_ids": ["TestCreateGrid.test_create_control_grid"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCreateGrid(unittest.TestCase):\n\n def test_create_control_grid(self):\n # ... other code\n expected = np.array(\n [\n [[-3.0, -3.0, -3.0, -3.0], [-1.0, -1.0, -1.0, -1.0], [1.0, 1.0, 1.0, 1.0], [3.0, 3.0, 3.0, 3.0]],\n [[-3.0, -1.0, 1.0, 3.0], [-3.0, -1.0, 1.0, 3.0], [-3.0, -1.0, 1.0, 3.0], [-3.0, -1.0, 1.0, 3.0]],\n [[1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0]],\n ]\n )\n np.testing.assert_allclose(g, expected)\n\n g = create_control_grid((1.0, 1.0, 1.0), (2.0, 2.0, 2.0), homogeneous=False)\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateGrid.test_create_control_grid.expected_9_test_assert.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateGrid.test_create_control_grid.expected_9_test_assert.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_create_grid_and_affine.py", "file_name": "test_create_grid_and_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 127, "end_line": 151, "span_ids": ["test_assert", "TestCreateGrid.test_create_control_grid"], "tokens": 509}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCreateGrid(unittest.TestCase):\n\n def test_create_control_grid(self):\n # ... 
other code\n expected = np.array(\n [\n [\n [[-2.0, -2.0, -2.0], [-2.0, -2.0, -2.0], [-2.0, -2.0, -2.0]],\n [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]],\n [[2.0, 2.0, 2.0], [2.0, 2.0, 2.0], [2.0, 2.0, 2.0]],\n ],\n [\n [[-2.0, -2.0, -2.0], [0.0, 0.0, 0.0], [2.0, 2.0, 2.0]],\n [[-2.0, -2.0, -2.0], [0.0, 0.0, 0.0], [2.0, 2.0, 2.0]],\n [[-2.0, -2.0, -2.0], [0.0, 0.0, 0.0], [2.0, 2.0, 2.0]],\n ],\n [\n [[-2.0, 0.0, 2.0], [-2.0, 0.0, 2.0], [-2.0, 0.0, 2.0]],\n [[-2.0, 0.0, 2.0], [-2.0, 0.0, 2.0], [-2.0, 0.0, 2.0]],\n [[-2.0, 0.0, 2.0], [-2.0, 0.0, 2.0], [-2.0, 0.0, 2.0]],\n ],\n ]\n )\n np.testing.assert_allclose(g, expected)\n\n\ndef test_assert(func, params, expected):\n m = func(*params)\n np.testing.assert_allclose(m, expected, atol=1e-7)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateAffine_TestCreateAffine.test_create_rotate.None_4": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateAffine_TestCreateAffine.test_create_rotate.None_4", "embedding": null, "metadata": {"file_path": "tests/test_create_grid_and_affine.py", "file_name": "test_create_grid_and_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 154, "end_line": 207, "span_ids": ["TestCreateAffine", "TestCreateAffine.test_create_rotate"], "tokens": 626}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCreateAffine(unittest.TestCase):\n def test_create_rotate(self):\n with self.assertRaisesRegex(TypeError, \"\"):\n create_rotate(2, None)\n\n with self.assertRaisesRegex(ValueError, \"\"):\n create_rotate(5, 1)\n\n test_assert(\n create_rotate,\n (2, 1.1),\n np.array([[0.45359612, -0.89120736, 0.0], [0.89120736, 0.45359612, 0.0], [0.0, 0.0, 1.0]]),\n )\n test_assert(\n create_rotate,\n (3, 1.1),\n np.array(\n [\n [1.0, 0.0, 0.0, 0.0],\n [0.0, 0.45359612, -0.89120736, 0.0],\n [0.0, 0.89120736, 0.45359612, 0.0],\n [0.0, 0.0, 0.0, 1.0],\n ]\n ),\n )\n test_assert(\n create_rotate,\n (3, (1.1, 1)),\n np.array(\n [\n [0.54030231, 0.0, 0.84147098, 0.0],\n [0.74992513, 0.45359612, -0.48152139, 0.0],\n [-0.38168798, 0.89120736, 0.24507903, 0.0],\n [0.0, 0.0, 0.0, 1.0],\n ]\n ),\n )\n test_assert(\n create_rotate,\n (3, (1, 1, 1.1)),\n np.array(\n [\n [0.24507903, -0.48152139, 0.84147098, 0.0],\n [0.80270075, -0.38596121, -0.45464871, 0.0],\n [0.54369824, 0.78687425, 0.29192658, 0.0],\n [0.0, 0.0, 0.0, 1.0],\n ]\n ),\n )\n test_assert(\n create_rotate,\n (3, (0, 0, np.pi / 2)),\n np.array([[0.0, -1.0, 0.0, 0.0], [1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]),\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateAffine.test_create_shear_TestCreateAffine.test_create_shear.test_assert_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateAffine.test_create_shear_TestCreateAffine.test_create_shear.test_assert_", "embedding": null, "metadata": {"file_path": "tests/test_create_grid_and_affine.py", "file_name": "test_create_grid_and_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 209, "end_line": 216, "span_ids": ["TestCreateAffine.test_create_shear"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCreateAffine(unittest.TestCase):\n\n def test_create_shear(self):\n test_assert(create_shear, (2, 1.0), np.array([[1.0, 1.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]))\n test_assert(create_shear, (2, (2.0, 3.0)), np.array([[1.0, 2.0, 0.0], [3.0, 1.0, 0.0], [0.0, 0.0, 1.0]]))\n test_assert(\n create_shear,\n (3, 1.0),\n np.array([[1.0, 1.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]),\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateAffine.test_create_scale_TestCreateAffine.test_create_scale.None_4": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateAffine.test_create_scale_TestCreateAffine.test_create_scale.None_4", "embedding": null, "metadata": {"file_path": "tests/test_create_grid_and_affine.py", "file_name": "test_create_grid_and_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 218, "end_line": 235, "span_ids": ["TestCreateAffine.test_create_scale"], "tokens": 465}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCreateAffine(unittest.TestCase):\n\n def test_create_scale(self):\n test_assert(create_scale, (2, 2), np.array([[2.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]))\n test_assert(create_scale, (2, [2, 2, 2]), np.array([[2.0, 0.0, 0.0], [0.0, 2.0, 0.0], [0.0, 0.0, 1.0]]))\n test_assert(\n create_scale,\n (3, [1.5, 2.4]),\n np.array([[1.5, 0.0, 0.0, 0.0], [0.0, 2.4, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]),\n )\n test_assert(\n create_scale,\n (3, 1.5),\n np.array([[1.5, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]),\n )\n test_assert(\n create_scale,\n (3, [1, 2, 3, 4, 5]),\n np.array([[1.0, 0.0, 0.0, 0.0], [0.0, 2.0, 0.0, 0.0], [0.0, 0.0, 3.0, 0.0], [0.0, 0.0, 0.0, 1.0]]),\n )", "start_char_idx": null, "end_char_idx": 
null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateAffine.test_create_translate_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_create_grid_and_affine.py_TestCreateAffine.test_create_translate_", "embedding": null, "metadata": {"file_path": "tests/test_create_grid_and_affine.py", "file_name": "test_create_grid_and_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 237, "end_line": 259, "span_ids": ["TestCreateAffine.test_create_translate", "impl"], "tokens": 477}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCreateAffine(unittest.TestCase):\n\n def test_create_translate(self):\n test_assert(create_translate, (2, 2), np.array([[1.0, 0.0, 2.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]))\n test_assert(create_translate, (2, [2, 2, 2]), np.array([[1.0, 0.0, 2.0], [0.0, 1.0, 2.0], [0.0, 0.0, 1.0]]))\n test_assert(\n create_translate,\n (3, [1.5, 2.4]),\n np.array([[1.0, 0.0, 0.0, 1.5], [0.0, 1.0, 0.0, 2.4], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]),\n )\n test_assert(\n create_translate,\n (3, 1.5),\n np.array([[1.0, 0.0, 0.0, 1.5], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]),\n )\n test_assert(\n create_translate,\n (3, [1, 2, 3, 4, 5]),\n np.array([[1.0, 0.0, 0.0, 1.0], [0.0, 1.0, 0.0, 2.0], [0.0, 0.0, 1.0, 3.0], [0.0, 0.0, 0.0, 1.0]]),\n )\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crop_foregroundd.py_unittest_TEST_CASE_3._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crop_foregroundd.py_unittest_TEST_CASE_3._", "embedding": null, "metadata": {"file_path": "tests/test_crop_foregroundd.py", "file_name": "test_crop_foregroundd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 44, "span_ids": ["impl:5", "docstring"], "tokens": 581}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import CropForegroundd\n\nTEST_CASE_1 = [\n {\n \"keys\": [\"img\", \"label\"],\n \"source_key\": \"label\",\n \"select_fn\": lambda x: x > 0,\n \"channel_indices\": None,\n \"margin\": 0,\n },\n {\n \"img\": np.array([[[1, 0, 2, 0, 1], [0, 1, 2, 1, 0], [2, 2, 3, 2, 2], [0, 1, 2, 1, 0], [1, 0, 2, 0, 1]]]),\n \"label\": np.array([[[0, 0, 0, 0, 0], [0, 1, 
0, 1, 0], [0, 0, 1, 0, 0], [0, 1, 0, 1, 0], [0, 0, 0, 0, 0]]]),\n },\n np.array([[[1, 2, 1], [2, 3, 2], [1, 2, 1]]]),\n]\n\nTEST_CASE_2 = [\n {\"keys\": [\"img\"], \"source_key\": \"img\", \"select_fn\": lambda x: x > 1, \"channel_indices\": None, \"margin\": 0},\n {\"img\": np.array([[[0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 1, 3, 1, 0], [0, 1, 1, 1, 0], [0, 0, 0, 0, 0]]])},\n np.array([[[3]]]),\n]\n\nTEST_CASE_3 = [\n {\"keys\": [\"img\"], \"source_key\": \"img\", \"select_fn\": lambda x: x > 0, \"channel_indices\": 0, \"margin\": 0},\n {\"img\": np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 1, 2, 1, 0], [0, 0, 0, 0, 0]]])},\n np.array([[[1, 2, 1], [2, 3, 2], [1, 2, 1]]]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_data_stats.py_TestDataStats_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_data_stats.py_TestDataStats_", "embedding": null, "metadata": {"file_path": "tests/test_data_stats.py", "file_name": "test_data_stats.py", "file_type": "text/x-python", "category": "test", "start_line": 134, "end_line": 168, "span_ids": ["impl:17", "TestDataStats.test_file", "TestDataStats.test_value", "TestDataStats"], "tokens": 287}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDataStats(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5, TEST_CASE_6, TEST_CASE_7])\n def test_value(self, input_param, input_data, expected_print):\n transform = DataStats(**input_param)\n _ = transform(input_data)\n self.assertEqual(transform.output, expected_print)\n\n @parameterized.expand([TEST_CASE_8])\n def test_file(self, input_data, expected_print):\n with tempfile.TemporaryDirectory() as tempdir:\n filename = os.path.join(tempdir, \"test_data_stats.log\")\n handler = logging.FileHandler(filename, mode=\"w\")\n handler.setLevel(logging.INFO)\n input_param = {\n \"prefix\": \"test data\",\n \"data_type\": True,\n \"data_shape\": True,\n \"value_range\": True,\n \"data_value\": True,\n \"additional_info\": np.mean,\n \"logger_handler\": handler,\n }\n transform = DataStats(**input_param)\n _ = transform(input_data)\n for h in transform._logger.handlers[:]:\n h.close()\n transform._logger.removeHandler(h)\n with open(filename, \"r\") as f:\n content = f.read()\n self.assertEqual(content, expected_print)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_data_statsd.py_TestDataStatsd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_data_statsd.py_TestDataStatsd_", "embedding": null, "metadata": {"file_path": 
"tests/test_data_statsd.py", "file_name": "test_data_statsd.py", "file_type": "text/x-python", "category": "test", "start_line": 155, "end_line": 201, "span_ids": ["TestDataStatsd.test_file", "TestDataStatsd", "TestDataStatsd.test_value", "impl:19"], "tokens": 318}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDataStatsd(unittest.TestCase):\n @parameterized.expand(\n [\n TEST_CASE_1,\n TEST_CASE_2,\n TEST_CASE_3,\n TEST_CASE_4,\n TEST_CASE_5,\n TEST_CASE_6,\n TEST_CASE_7,\n TEST_CASE_8,\n ]\n )\n def test_value(self, input_param, input_data, expected_print):\n transform = DataStatsd(**input_param)\n _ = transform(input_data)\n self.assertEqual(transform.printer.output, expected_print)\n\n @parameterized.expand([TEST_CASE_9])\n def test_file(self, input_data, expected_print):\n with tempfile.TemporaryDirectory() as tempdir:\n filename = os.path.join(tempdir, \"test_stats.log\")\n handler = logging.FileHandler(filename, mode=\"w\")\n handler.setLevel(logging.INFO)\n input_param = {\n \"keys\": \"img\",\n \"prefix\": \"test data\",\n \"data_shape\": True,\n \"value_range\": True,\n \"data_value\": True,\n \"additional_info\": np.mean,\n \"logger_handler\": handler,\n }\n transform = DataStatsd(**input_param)\n _ = transform(input_data)\n for h in transform.printer._logger.handlers[:]:\n h.close()\n transform.printer._logger.removeHandler(h)\n del handler\n with open(filename, \"r\") as f:\n content = f.read()\n self.assertEqual(content, expected_print)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_densenet.py_TestDENSENET_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_densenet.py_TestDENSENET_", "embedding": null, "metadata": {"file_path": "tests/test_densenet.py", "file_name": "test_densenet.py", "file_type": "text/x-python", "category": "test", "start_line": 104, "end_line": 121, "span_ids": ["TestDENSENET.test_densenet_shape", "TestDENSENET.test_script", "TestDENSENET", "impl:27"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDENSENET(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_densenet_shape(self, model, input_param, input_shape, expected_shape):\n net = model(**input_param).to(device)\n with eval_mode(net):\n result = net.forward(torch.randn(input_shape).to(device))\n self.assertEqual(result.shape, expected_shape)\n\n @parameterized.expand(TEST_SCRIPT_CASES)\n def test_script(self, model, input_param, input_shape, expected_shape):\n net = model(**input_param)\n test_data = torch.randn(input_shape)\n test_script_save(net, test_data)\n\n\nif __name__ == \"__main__\":\n 
unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_loss.py_unittest_TEST_CASES": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_loss.py_unittest_TEST_CASES", "embedding": null, "metadata": {"file_path": "tests/test_dice_loss.py", "file_name": "test_dice_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 162, "span_ids": ["docstring"], "tokens": 51}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.losses import DiceLoss\nfrom tests.utils import SkipIfBeforePyTorchVersion, test_script_save\n\nTEST_CASES =\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_loss.py_TestDiceLoss_TestDiceLoss.test_ill_opts.None_2.DiceLoss_reduction_None_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_loss.py_TestDiceLoss_TestDiceLoss.test_ill_opts.None_2.DiceLoss_reduction_None_", "embedding": null, "metadata": {"file_path": "tests/test_dice_loss.py", "file_name": "test_dice_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 114, "end_line": 133, "span_ids": ["TestDiceLoss.test_shape", "TestDiceLoss.test_ill_shape", "TestDiceLoss", "TestDiceLoss.test_ill_opts"], "tokens": 227}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDiceLoss(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_shape(self, input_param, input_data, expected_val):\n result = DiceLoss(**input_param).forward(**input_data)\n np.testing.assert_allclose(result.detach().cpu().numpy(), expected_val, rtol=1e-5)\n\n def test_ill_shape(self):\n loss = DiceLoss()\n with self.assertRaisesRegex(AssertionError, \"\"):\n loss.forward(torch.ones((1, 2, 3)), torch.ones((4, 5, 6)))\n\n def test_ill_opts(self):\n with self.assertRaisesRegex(ValueError, \"\"):\n DiceLoss(sigmoid=True, softmax=True)\n chn_input = torch.ones((1, 1, 3))\n chn_target = torch.ones((1, 1, 3))\n with self.assertRaisesRegex(ValueError, \"\"):\n DiceLoss(reduction=\"unknown\")(chn_input, chn_target)\n with self.assertRaisesRegex(ValueError, \"\"):\n DiceLoss(reduction=None)(chn_input, chn_target)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_loss.py_TestDiceLoss.test_input_warnings_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dice_loss.py_TestDiceLoss.test_input_warnings_", "embedding": null, "metadata": {"file_path": "tests/test_dice_loss.py", "file_name": "test_dice_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 186, "end_line": 208, "span_ids": ["TestDiceLoss.test_script", "impl:3", "TestDiceLoss.test_input_warnings"], "tokens": 207}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDiceLoss(unittest.TestCase):\n\n def test_input_warnings(self):\n chn_input = torch.ones((1, 1, 3))\n chn_target = torch.ones((1, 1, 3))\n with self.assertWarns(Warning):\n loss = DiceLoss(include_background=False)\n loss.forward(chn_input, chn_target)\n with self.assertWarns(Warning):\n loss = DiceLoss(softmax=True)\n loss.forward(chn_input, chn_target)\n with self.assertWarns(Warning):\n loss = DiceLoss(to_onehot_y=True)\n loss.forward(chn_input, chn_target)\n\n @SkipIfBeforePyTorchVersion((1, 7, 0))\n def test_script(self):\n loss = DiceLoss()\n test_input = torch.ones(2, 1, 8, 8)\n test_script_save(loss, test_input, test_input)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_divisible_pad.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_divisible_pad.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_divisible_pad.py", "file_name": "test_divisible_pad.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 46, "span_ids": ["TestDivisiblePad", "TestDivisiblePad.test_pad_shape", "impl:5", "docstring"], "tokens": 252}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import DivisiblePad\n\n# pad first dim to be divisible by 7, the second unchanged.\nTEST_CASE_1 = [\n {\"k\": (7, -1), \"mode\": \"constant\"},\n np.zeros((3, 8, 7)),\n np.zeros((3, 14, 7)),\n]\n\n# pad all dimensions to be divisible by 5\nTEST_CASE_2 = [\n {\"k\": 5, \"mode\": \"constant\"},\n np.zeros((3, 10, 5, 17)),\n np.zeros((3, 10, 5, 20)),\n]\n\n\nclass TestDivisiblePad(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2])\n def test_pad_shape(self, input_param, input_data, expected_val):\n padder = DivisiblePad(**input_param)\n result = 
padder(input_data)\n self.assertAlmostEqual(result.shape, expected_val.shape)\n result = padder(input_data, mode=input_param[\"mode\"])\n self.assertAlmostEqual(result.shape, expected_val.shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_divisible_padd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_divisible_padd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_divisible_padd.py", "file_name": "test_divisible_padd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 48, "span_ids": ["TestDivisiblePadd.test_pad_shape", "TestDivisiblePadd", "impl:7", "docstring"], "tokens": 284}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import DivisiblePadd\n\nTEST_CASE_1 = [\n {\"keys\": [\"img\"], \"k\": [4, 3, 2], \"mode\": \"constant\"},\n {\"img\": np.zeros((3, 8, 8, 4))},\n np.zeros((3, 8, 9, 4)),\n]\n\nTEST_CASE_2 = [\n {\"keys\": [\"img\"], \"k\": 7, \"mode\": \"constant\"},\n {\"img\": np.zeros((3, 8, 7))},\n np.zeros((3, 14, 7)),\n]\n\nTEST_CASE_3 = [\n {\"keys\": [\"img\"], \"k\": 0, \"mode\": {\"constant\"}},\n {\"img\": np.zeros((3, 8))},\n np.zeros((3, 8)),\n]\n\n\nclass TestDivisiblePadd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_pad_shape(self, input_param, input_data, expected_val):\n padder = DivisiblePadd(**input_param)\n result = padder(input_data)\n np.testing.assert_allclose(result[\"img\"], expected_val)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_download_and_extract.py_os_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_download_and_extract.py_os_", "embedding": null, "metadata": {"file_path": "tests/test_download_and_extract.py", "file_name": "test_download_and_extract.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 56, "span_ids": ["TestDownloadAndExtract.test_actions", "TestDownloadAndExtract", "impl", "docstring"], "tokens": 401}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport unittest\nfrom urllib.error import ContentTooShortError, HTTPError\n\nfrom monai.apps 
import download_and_extract, download_url, extractall\nfrom tests.utils import skip_if_quick\n\n\nclass TestDownloadAndExtract(unittest.TestCase):\n @skip_if_quick\n def test_actions(self):\n testing_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), \"testing_data\")\n url = \"https://www.dropbox.com/s/5wwskxctvcxiuea/MedNIST.tar.gz?dl=1\"\n filepath = os.path.join(testing_dir, \"MedNIST.tar.gz\")\n output_dir = testing_dir\n md5_value = \"0bc7306e7427e00ad1c5526a6677552d\"\n try:\n download_and_extract(url, filepath, output_dir, md5_value)\n download_and_extract(url, filepath, output_dir, md5_value)\n except (ContentTooShortError, HTTPError, RuntimeError) as e:\n print(str(e))\n if isinstance(e, RuntimeError):\n # FIXME: skip MD5 check as current downloading method may fail\n self.assertTrue(str(e).startswith(\"md5 check\"))\n return # skipping this test due the network connection errors\n\n wrong_md5 = \"0\"\n try:\n download_url(url, filepath, wrong_md5)\n except (ContentTooShortError, HTTPError, RuntimeError) as e:\n print(str(e))\n if isinstance(e, RuntimeError):\n # FIXME: skip MD5 check as current downloading method may fail\n self.assertTrue(str(e).startswith(\"md5 check\"))\n return # skipping this test due the network connection errors\n\n try:\n extractall(filepath, output_dir, wrong_md5)\n except RuntimeError as e:\n self.assertTrue(str(e).startswith(\"md5 check\"))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_downsample_block.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_downsample_block.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_downsample_block.py", "file_name": "test_downsample_block.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 52, "span_ids": ["TestMaxAvgPool", "impl:3", "TestMaxAvgPool.test_shape", "docstring"], "tokens": 418}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks import eval_mode\nfrom monai.networks.blocks import MaxAvgPool\n\nTEST_CASES = [\n [{\"spatial_dims\": 2, \"kernel_size\": 2}, (7, 4, 64, 48), (7, 8, 32, 24)], # 4-channel 2D, batch 7\n [{\"spatial_dims\": 1, \"kernel_size\": 4}, (16, 4, 63), (16, 8, 15)], # 4-channel 1D, batch 16\n [ # 4-channel 1D, batch 16\n {\"spatial_dims\": 1, \"kernel_size\": 4, \"padding\": 1},\n (16, 4, 63),\n (16, 8, 16),\n ],\n [ # 4-channel 3D, batch 16\n {\"spatial_dims\": 3, \"kernel_size\": 3, \"ceil_mode\": True},\n (16, 4, 32, 24, 48),\n (16, 8, 11, 8, 16),\n ],\n [ # 1-channel 3D, batch 16\n {\"spatial_dims\": 3, \"kernel_size\": 3, \"ceil_mode\": False},\n (16, 1, 32, 24, 48),\n (16, 2, 10, 8, 16),\n ],\n]\n\n\nclass TestMaxAvgPool(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_shape(self, input_param, input_shape, expected_shape):\n net = 
MaxAvgPool(**input_param)\n with eval_mode(net):\n result = net(torch.randn(input_shape))\n self.assertEqual(result.shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_flip.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_flip.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_flip.py", "file_name": "test_flip.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 44, "span_ids": ["TestFlip.test_invalid_inputs", "TestFlip.test_correct_results", "impl:5", "TestFlip", "docstring"], "tokens": 244}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import Flip\nfrom tests.utils import NumpyImageTestCase2D\n\nINVALID_CASES = [(\"wrong_axis\", [\"s\", 1], TypeError), (\"not_numbers\", \"s\", TypeError)]\n\nVALID_CASES = [(\"no_axis\", None), (\"one_axis\", 1), (\"many_axis\", [0, 1]), (\"negative_axis\", [0, -1])]\n\n\nclass TestFlip(NumpyImageTestCase2D):\n @parameterized.expand(INVALID_CASES)\n def test_invalid_inputs(self, _, spatial_axis, raises):\n with self.assertRaises(raises):\n flip = Flip(spatial_axis)\n flip(self.imt[0])\n\n @parameterized.expand(VALID_CASES)\n def test_correct_results(self, _, spatial_axis):\n flip = Flip(spatial_axis=spatial_axis)\n expected = []\n for channel in self.imt[0]:\n expected.append(np.flip(channel, spatial_axis))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(expected, flip(self.imt[0])))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_flipd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_flipd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_flipd.py", "file_name": "test_flipd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 45, "span_ids": ["TestFlipd.test_invalid_cases", "TestFlipd.test_correct_results", "impl:5", "docstring", "TestFlipd"], "tokens": 261}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import Flipd\nfrom tests.utils import 
NumpyImageTestCase2D\n\nINVALID_CASES = [(\"wrong_axis\", [\"s\", 1], TypeError), (\"not_numbers\", \"s\", TypeError)]\n\nVALID_CASES = [(\"no_axis\", None), (\"one_axis\", 1), (\"many_axis\", [0, 1])]\n\n\nclass TestFlipd(NumpyImageTestCase2D):\n @parameterized.expand(INVALID_CASES)\n def test_invalid_cases(self, _, spatial_axis, raises):\n with self.assertRaises(raises):\n flip = Flipd(keys=\"img\", spatial_axis=spatial_axis)\n flip({\"img\": self.imt[0]})\n\n @parameterized.expand(VALID_CASES)\n def test_correct_results(self, _, spatial_axis):\n flip = Flipd(keys=\"img\", spatial_axis=spatial_axis)\n expected = []\n for channel in self.imt[0]:\n expected.append(np.flip(channel, spatial_axis))\n expected = np.stack(expected)\n res = flip({\"img\": self.imt[0]})\n assert np.allclose(expected, res[\"img\"])\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_unittest_TestFocalLoss.test_consistency_with_cross_entropy_2d.self_assertAlmostEqual_ma": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_unittest_TestFocalLoss.test_consistency_with_cross_entropy_2d.self_assertAlmostEqual_ma", "embedding": null, "metadata": {"file_path": "tests/test_focal_loss.py", "file_name": "test_focal_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 45, "span_ids": ["TestFocalLoss", "TestFocalLoss.test_consistency_with_cross_entropy_2d", "docstring"], "tokens": 340}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom monai.losses import FocalLoss\nfrom monai.networks import one_hot\nfrom tests.utils import SkipIfBeforePyTorchVersion, test_script_save\n\n\nclass TestFocalLoss(unittest.TestCase):\n def test_consistency_with_cross_entropy_2d(self):\n # For gamma=0 the focal loss reduces to the cross entropy loss\n focal_loss = FocalLoss(to_onehot_y=True, gamma=0.0, reduction=\"mean\", weight=1.0)\n ce = nn.CrossEntropyLoss(reduction=\"mean\")\n max_error = 0\n class_num = 10\n batch_size = 128\n for _ in range(100):\n # Create a random tensor of shape (batch_size, class_num, 8, 4)\n x = torch.rand(batch_size, class_num, 8, 4, requires_grad=True)\n # Create a random batch of classes\n l = torch.randint(low=0, high=class_num, size=(batch_size, 1, 8, 4))\n if torch.cuda.is_available():\n x = x.cuda()\n l = l.cuda()\n output0 = focal_loss(x, l)\n output1 = ce(x, l[:, 0]) / class_num\n a = float(output0.cpu().detach())\n b = float(output1.cpu().detach())\n if abs(a - b) > max_error:\n max_error = abs(a - b)\n self.assertAlmostEqual(max_error, 0.0, places=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_consistency_with_cross_entropy_classification_TestFocalLoss.test_consistency_with_cross_entropy_classification.self_assertAlmostEqual_ma": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_consistency_with_cross_entropy_classification_TestFocalLoss.test_consistency_with_cross_entropy_classification.self_assertAlmostEqual_ma", "embedding": null, "metadata": {"file_path": "tests/test_focal_loss.py", "file_name": "test_focal_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 70, "end_line": 92, "span_ids": ["TestFocalLoss.test_consistency_with_cross_entropy_classification"], "tokens": 267}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFocalLoss(unittest.TestCase):\n\n def test_consistency_with_cross_entropy_classification(self):\n # for gamma=0 the focal loss reduces to the cross entropy loss\n focal_loss = FocalLoss(to_onehot_y=True, gamma=0.0, reduction=\"mean\")\n ce = nn.CrossEntropyLoss(reduction=\"mean\")\n max_error = 0\n class_num = 10\n batch_size = 128\n for _ in range(100):\n # Create a random scores tensor of shape (batch_size, class_num)\n x = torch.rand(batch_size, class_num, requires_grad=True)\n # Create a random batch of classes\n l = torch.randint(low=0, high=class_num, size=(batch_size, 1))\n l = l.long()\n if torch.cuda.is_available():\n x = x.cuda()\n l = l.cuda()\n output0 = focal_loss(x, l)\n output1 = ce(x, l[:, 0]) / class_num\n a = float(output0.cpu().detach())\n b = float(output1.cpu().detach())\n if abs(a - b) > max_error:\n max_error = abs(a - b)\n self.assertAlmostEqual(max_error, 0.0, places=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_bin_seg_2d_TestFocalLoss.test_bin_seg_2d.self_assertAlmostEqual_fo": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_bin_seg_2d_TestFocalLoss.test_bin_seg_2d.self_assertAlmostEqual_fo", "embedding": null, "metadata": {"file_path": "tests/test_focal_loss.py", "file_name": "test_focal_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 94, "end_line": 107, "span_ids": ["TestFocalLoss.test_bin_seg_2d"], "tokens": 240}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFocalLoss(unittest.TestCase):\n\n def test_bin_seg_2d(self):\n # define 2d examples\n target = torch.tensor([[0, 0, 0, 0], [0, 1, 1, 0], [0, 1, 1, 0], [0, 0, 0, 0]])\n # add another dimension corresponding to 
the batch (batch size = 1 here)\n target = target.unsqueeze(0) # shape (1, H, W)\n pred_very_good = 1000 * F.one_hot(target, num_classes=2).permute(0, 3, 1, 2).float()\n\n # initialize the mean dice loss\n loss = FocalLoss(to_onehot_y=True)\n\n # focal loss for pred_very_good should be close to 0\n target = target.unsqueeze(1) # shape (1, 1, H, W)\n focal_loss_good = float(loss(pred_very_good, target).cpu())\n self.assertAlmostEqual(focal_loss_good, 0.0, places=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_empty_class_2d_TestFocalLoss.test_empty_class_2d.self_assertAlmostEqual_fo": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_empty_class_2d_TestFocalLoss.test_empty_class_2d.self_assertAlmostEqual_fo", "embedding": null, "metadata": {"file_path": "tests/test_focal_loss.py", "file_name": "test_focal_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 109, "end_line": 123, "span_ids": ["TestFocalLoss.test_empty_class_2d"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFocalLoss(unittest.TestCase):\n\n def test_empty_class_2d(self):\n num_classes = 2\n # define 2d examples\n target = torch.tensor([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])\n # add another dimension corresponding to the batch (batch size = 1 here)\n target = target.unsqueeze(0) # shape (1, H, W)\n pred_very_good = 1000 * F.one_hot(target, num_classes=num_classes).permute(0, 3, 1, 2).float()\n\n # initialize the mean dice loss\n loss = FocalLoss(to_onehot_y=True)\n\n # focal loss for pred_very_good should be close to 0\n target = target.unsqueeze(1) # shape (1, 1, H, W)\n focal_loss_good = float(loss(pred_very_good, target).cpu())\n self.assertAlmostEqual(focal_loss_good, 0.0, places=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_multi_class_seg_2d_TestFocalLoss.test_multi_class_seg_2d.None_1": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_multi_class_seg_2d_TestFocalLoss.test_multi_class_seg_2d.None_1", "embedding": null, "metadata": {"file_path": "tests/test_focal_loss.py", "file_name": "test_focal_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 125, "end_line": 144, "span_ids": ["TestFocalLoss.test_multi_class_seg_2d"], "tokens": 342}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFocalLoss(unittest.TestCase):\n\n def test_multi_class_seg_2d(self):\n num_classes = 6 # labels 0 to 5\n # define 2d examples\n target = torch.tensor([[0, 0, 0, 0], [0, 1, 2, 0], [0, 3, 4, 0], [0, 0, 0, 0]])\n # add another dimension corresponding to the batch (batch size = 1 here)\n target = target.unsqueeze(0) # shape (1, H, W)\n pred_very_good = 1000 * F.one_hot(target, num_classes=num_classes).permute(0, 3, 1, 2).float()\n # initialize the mean dice loss\n loss = FocalLoss(to_onehot_y=True)\n loss_onehot = FocalLoss(to_onehot_y=False)\n\n # focal loss for pred_very_good should be close to 0\n target_one_hot = F.one_hot(target, num_classes=num_classes).permute(0, 3, 1, 2) # test one hot\n target = target.unsqueeze(1) # shape (1, 1, H, W)\n\n focal_loss_good = float(loss(pred_very_good, target).cpu())\n self.assertAlmostEqual(focal_loss_good, 0.0, places=3)\n\n focal_loss_good = float(loss_onehot(pred_very_good, target_one_hot).cpu())\n self.assertAlmostEqual(focal_loss_good, 0.0, places=3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_bin_seg_3d_TestFocalLoss.test_bin_seg_3d.None_1": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_focal_loss.py_TestFocalLoss.test_bin_seg_3d_TestFocalLoss.test_bin_seg_3d.None_1", "embedding": null, "metadata": {"file_path": "tests/test_focal_loss.py", "file_name": "test_focal_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 146, "end_line": 174, "span_ids": ["TestFocalLoss.test_bin_seg_3d"], "tokens": 473}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFocalLoss(unittest.TestCase):\n\n def test_bin_seg_3d(self):\n num_classes = 2 # labels 0, 1\n # define 3d examples\n target = torch.tensor(\n [\n # raw 0\n [[0, 0, 0, 0], [0, 1, 1, 0], [0, 1, 1, 0], [0, 0, 0, 0]],\n # raw 1\n [[0, 0, 0, 0], [0, 1, 1, 0], [0, 1, 1, 0], [0, 0, 0, 0]],\n # raw 2\n [[0, 0, 0, 0], [0, 1, 1, 0], [0, 1, 1, 0], [0, 0, 0, 0]],\n ]\n )\n # add another dimension corresponding to the batch (batch size = 1 here)\n target = target.unsqueeze(0) # shape (1, H, W, D)\n target_one_hot = F.one_hot(target, num_classes=num_classes).permute(0, 4, 1, 2, 3) # test one hot\n pred_very_good = 1000 * F.one_hot(target, num_classes=num_classes).permute(0, 4, 1, 2, 3).float()\n\n # initialize the mean dice loss\n loss = FocalLoss(to_onehot_y=True)\n loss_onehot = FocalLoss(to_onehot_y=False)\n\n # focal loss for pred_very_good should be close to 0\n target = target.unsqueeze(1) # shape (1, 1, H, W)\n focal_loss_good = float(loss(pred_very_good, target).cpu())\n self.assertAlmostEqual(focal_loss_good, 0.0, places=3)\n\n focal_loss_good = float(loss_onehot(pred_very_good, target_one_hot).cpu())\n self.assertAlmostEqual(focal_loss_good, 0.0, places=3)", "start_char_idx": null, "end_char_idx": null, 
"text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_GaussianFilterTestCase.test_2d_GaussianFilterTestCase.test_2d.if_torch_cuda_is_availabl.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_GaussianFilterTestCase.test_2d_GaussianFilterTestCase.test_2d.if_torch_cuda_is_availabl.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_gaussian_filter.py", "file_name": "test_gaussian_filter.py", "file_type": "text/x-python", "category": "test", "start_line": 122, "end_line": 141, "span_ids": ["GaussianFilterTestCase.test_2d"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GaussianFilterTestCase(unittest.TestCase):\n\n @skip_if_quick\n def test_2d(self):\n a = torch.ones(1, 1, 3, 3)\n g = GaussianFilter(2, 3, 3).to(torch.device(\"cpu:0\"))\n expected = np.array(\n [\n [\n [\n [0.13239081, 0.13932934, 0.13239081],\n [0.13932936, 0.14663152, 0.13932936],\n [0.13239081, 0.13932934, 0.13239081],\n ]\n ]\n ]\n )\n\n np.testing.assert_allclose(g(a).cpu().numpy(), expected, rtol=1e-5)\n if torch.cuda.is_available():\n g = GaussianFilter(2, 3, 3).to(torch.device(\"cuda:0\"))\n np.testing.assert_allclose(g(a.cuda()).cpu().numpy(), expected, rtol=1e-2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_GaussianFilterTestCase.test_3d_GaussianFilterTestCase.test_3d.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_GaussianFilterTestCase.test_3d_GaussianFilterTestCase.test_3d.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_gaussian_filter.py", "file_name": "test_gaussian_filter.py", "file_type": "text/x-python", "category": "test", "start_line": 131, "end_line": 163, "span_ids": ["GaussianFilterTestCase.test_3d"], "tokens": 464}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GaussianFilterTestCase(unittest.TestCase):\n\n def test_3d(self):\n a = torch.ones(1, 1, 4, 3, 4)\n g = GaussianFilter(3, 3, 3).to(torch.device(\"cpu:0\"))\n\n expected = np.array(\n [\n [\n [\n [\n [0.07189433, 0.07911152, 0.07911152, 0.07189433],\n [0.07566228, 0.08325771, 0.08325771, 0.07566228],\n [0.07189433, 0.07911152, 0.07911152, 0.07189433],\n ],\n [\n [0.07911152, 0.08705322, 0.08705322, 0.07911152],\n [0.08325771, 0.09161563, 0.09161563, 
0.08325771],\n [0.07911152, 0.08705322, 0.08705322, 0.07911152],\n ],\n [\n [0.07911152, 0.08705322, 0.08705322, 0.07911152],\n [0.08325771, 0.09161563, 0.09161563, 0.08325771],\n [0.07911152, 0.08705322, 0.08705322, 0.07911152],\n ],\n [\n [0.07189433, 0.07911152, 0.07911152, 0.07189433],\n [0.07566228, 0.08325771, 0.08325771, 0.07566228],\n [0.07189433, 0.07911152, 0.07911152, 0.07189433],\n ],\n ]\n ]\n ]\n )\n np.testing.assert_allclose(g(a).cpu().numpy(), expected, rtol=1e-5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_GaussianFilterTestCase.test_3d_sigmas_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_GaussianFilterTestCase.test_3d_sigmas_", "embedding": null, "metadata": {"file_path": "tests/test_gaussian_filter.py", "file_name": "test_gaussian_filter.py", "file_type": "text/x-python", "category": "test", "start_line": 177, "end_line": 206, "span_ids": ["GaussianFilterTestCase.test_wrong_args", "impl:9", "GaussianFilterTestCase.test_3d_sigmas"], "tokens": 421}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GaussianFilterTestCase(unittest.TestCase):\n\n def test_3d_sigmas(self):\n a = torch.ones(1, 1, 4, 3, 2)\n g = GaussianFilter(3, [3, 2, 1], 3).to(torch.device(\"cpu:0\"))\n\n expected = np.array(\n [\n [\n [\n [[0.13690521, 0.13690521], [0.15181276, 0.15181276], [0.13690521, 0.13690521]],\n [[0.1506486, 0.15064861], [0.16705267, 0.16705267], [0.1506486, 0.15064861]],\n [[0.1506486, 0.15064861], [0.16705267, 0.16705267], [0.1506486, 0.15064861]],\n [[0.13690521, 0.13690521], [0.15181276, 0.15181276], [0.13690521, 0.13690521]],\n ]\n ]\n ]\n )\n np.testing.assert_allclose(g(a).cpu().numpy(), expected, rtol=1e-5)\n if torch.cuda.is_available():\n g = GaussianFilter(3, [3, 2, 1], 3).to(torch.device(\"cuda:0\"))\n np.testing.assert_allclose(g(a.cuda()).cpu().numpy(), expected, rtol=1e-2)\n\n def test_wrong_args(self):\n with self.assertRaisesRegex(ValueError, \"\"):\n GaussianFilter(3, [3, 2], 3).to(torch.device(\"cpu:0\"))\n GaussianFilter(3, [3, 2, 1], 3).to(torch.device(\"cpu:0\")) # test init\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_dice_loss.py_unittest_TEST_CASES": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_dice_loss.py_unittest_TEST_CASES", "embedding": null, "metadata": {"file_path": "tests/test_generalized_dice_loss.py", "file_name": "test_generalized_dice_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 145, "span_ids": ["docstring"], "tokens": 53}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.losses import GeneralizedDiceLoss\nfrom tests.utils import SkipIfBeforePyTorchVersion, test_script_save\n\nTEST_CASES =\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_dice_loss.py_TestGeneralizedDiceLoss_TestGeneralizedDiceLoss.test_ill_shape.with_self_assertRaisesReg.loss_forward_torch_ones_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_dice_loss.py_TestGeneralizedDiceLoss_TestGeneralizedDiceLoss.test_ill_shape.with_self_assertRaisesReg.loss_forward_torch_ones_", "embedding": null, "metadata": {"file_path": "tests/test_generalized_dice_loss.py", "file_name": "test_generalized_dice_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 114, "end_line": 123, "span_ids": ["TestGeneralizedDiceLoss.test_shape", "TestGeneralizedDiceLoss", "TestGeneralizedDiceLoss.test_ill_shape"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralizedDiceLoss(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_shape(self, input_param, input_data, expected_val):\n result = GeneralizedDiceLoss(**input_param).forward(**input_data)\n np.testing.assert_allclose(result.detach().cpu().numpy(), expected_val, rtol=1e-5)\n\n def test_ill_shape(self):\n loss = GeneralizedDiceLoss()\n with self.assertRaisesRegex(AssertionError, \"\"):\n loss.forward(torch.ones((1, 2, 3)), torch.ones((4, 5, 6)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_dice_loss.py_TestGeneralizedDiceLoss.test_ill_opts_TestGeneralizedDiceLoss.test_ill_opts.None_2.GeneralizedDiceLoss_reduc": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_dice_loss.py_TestGeneralizedDiceLoss.test_ill_opts_TestGeneralizedDiceLoss.test_ill_opts.None_2.GeneralizedDiceLoss_reduc", "embedding": null, "metadata": {"file_path": "tests/test_generalized_dice_loss.py", "file_name": "test_generalized_dice_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 125, "end_line": 133, "span_ids": ["TestGeneralizedDiceLoss.test_ill_opts"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralizedDiceLoss(unittest.TestCase):\n\n def test_ill_opts(self):\n with self.assertRaisesRegex(ValueError, \"\"):\n GeneralizedDiceLoss(sigmoid=True, softmax=True)\n chn_input = torch.ones((1, 1, 3))\n chn_target = torch.ones((1, 1, 3))\n with self.assertRaisesRegex(ValueError, \"\"):\n GeneralizedDiceLoss(reduction=\"unknown\")(chn_input, chn_target)\n with self.assertRaisesRegex(ValueError, \"\"):\n GeneralizedDiceLoss(reduction=None)(chn_input, chn_target)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_dice_loss.py_TestGeneralizedDiceLoss.test_input_warnings_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generalized_dice_loss.py_TestGeneralizedDiceLoss.test_input_warnings_", "embedding": null, "metadata": {"file_path": "tests/test_generalized_dice_loss.py", "file_name": "test_generalized_dice_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 169, "end_line": 191, "span_ids": ["TestGeneralizedDiceLoss.test_script", "impl:3", "TestGeneralizedDiceLoss.test_input_warnings"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGeneralizedDiceLoss(unittest.TestCase):\n\n def test_input_warnings(self):\n chn_input = torch.ones((1, 1, 3))\n chn_target = torch.ones((1, 1, 3))\n with self.assertWarns(Warning):\n loss = GeneralizedDiceLoss(include_background=False)\n loss.forward(chn_input, chn_target)\n with self.assertWarns(Warning):\n loss = GeneralizedDiceLoss(softmax=True)\n loss.forward(chn_input, chn_target)\n with self.assertWarns(Warning):\n loss = GeneralizedDiceLoss(to_onehot_y=True)\n loss.forward(chn_input, chn_target)\n\n @SkipIfBeforePyTorchVersion((1, 7, 0))\n def test_script(self):\n loss = GeneralizedDiceLoss()\n test_input = torch.ones(2, 1, 8, 8)\n test_script_save(loss, test_input, test_input)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generate_pos_neg_label_crop_centers.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generate_pos_neg_label_crop_centers.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_generate_pos_neg_label_crop_centers.py", "file_name": "test_generate_pos_neg_label_crop_centers.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 46, "span_ids": 
["TestGeneratePosNegLabelCropCenters", "impl:3", "TestGeneratePosNegLabelCropCenters.test_type_shape", "docstring"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import generate_pos_neg_label_crop_centers\n\nTEST_CASE_1 = [\n {\n \"spatial_size\": [2, 2, 2],\n \"num_samples\": 2,\n \"pos_ratio\": 1.0,\n \"label_spatial_shape\": [3, 3, 3],\n \"fg_indices\": [1, 9, 18],\n \"bg_indices\": [3, 12, 21],\n \"rand_state\": np.random.RandomState(),\n },\n list,\n 2,\n 3,\n]\n\n\nclass TestGeneratePosNegLabelCropCenters(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1])\n def test_type_shape(self, input_data, expected_type, expected_count, expected_shape):\n result = generate_pos_neg_label_crop_centers(**input_data)\n self.assertIsInstance(result, expected_type)\n self.assertEqual(len(result), expected_count)\n self.assertEqual(len(result[0]), expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generate_spatial_bounding_box.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_generate_spatial_bounding_box.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_generate_spatial_bounding_box.py", "file_name": "test_generate_spatial_bounding_box.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 79, "span_ids": ["TestGenerateSpatialBoundingBox", "impl:11", "impl:7", "docstring", "TestGenerateSpatialBoundingBox.test_value"], "tokens": 787}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import generate_spatial_bounding_box\n\nTEST_CASE_1 = [\n {\n \"img\": np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 1, 2, 1, 0], [0, 0, 0, 0, 0]]]),\n \"select_fn\": lambda x: x > 0,\n \"channel_indices\": None,\n \"margin\": 0,\n },\n ([1, 1], [4, 4]),\n]\n\nTEST_CASE_2 = [\n {\n \"img\": np.array([[[0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 1, 3, 1, 0], [0, 1, 1, 1, 0], [0, 0, 0, 0, 0]]]),\n \"select_fn\": lambda x: x > 1,\n \"channel_indices\": None,\n \"margin\": 0,\n },\n ([2, 2], [3, 3]),\n]\n\nTEST_CASE_3 = [\n {\n \"img\": np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 1, 2, 1, 0], [0, 0, 0, 0, 0]]]),\n \"select_fn\": lambda x: x > 0,\n \"channel_indices\": 0,\n \"margin\": 0,\n },\n ([1, 1], [4, 4]),\n]\n\nTEST_CASE_4 = [\n {\n \"img\": np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]]),\n 
\"select_fn\": lambda x: x > 0,\n \"channel_indices\": None,\n \"margin\": 1,\n },\n ([0, 0], [4, 5]),\n]\n\nTEST_CASE_5 = [\n {\n \"img\": np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]]),\n \"select_fn\": lambda x: x > 0,\n \"channel_indices\": None,\n \"margin\": [2, 1],\n },\n ([0, 0], [5, 5]),\n]\n\n\nclass TestGenerateSpatialBoundingBox(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5])\n def test_value(self, input_data, expected_box):\n result = generate_spatial_bounding_box(**input_data)\n self.assertTupleEqual(result, expected_box)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_rocauc.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_rocauc.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_handler_rocauc.py", "file_name": "test_handler_rocauc.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 45, "span_ids": ["TestHandlerROCAUC", "TestHandlerROCAUC.test_compute", "impl", "docstring"], "tokens": 240}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\n\nfrom monai.handlers import ROCAUC\nfrom monai.transforms import Activations, AsDiscrete\n\n\nclass TestHandlerROCAUC(unittest.TestCase):\n def test_compute(self):\n auc_metric = ROCAUC()\n act = Activations(softmax=True)\n to_onehot = AsDiscrete(to_onehot=True, n_classes=2)\n\n y_pred = torch.Tensor([[0.1, 0.9], [0.3, 1.4]])\n y = torch.Tensor([[0], [1]])\n y_pred = act(y_pred)\n y = to_onehot(y)\n auc_metric.update([y_pred, y])\n\n y_pred = torch.Tensor([[0.2, 0.1], [0.1, 0.5]])\n y = torch.Tensor([[0], [1]])\n y_pred = act(y_pred)\n y = to_onehot(y)\n auc_metric.update([y_pred, y])\n\n auc = auc_metric.compute()\n np.testing.assert_allclose(0.75, auc)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_segmentation_saver.py_TestHandlerSegmentationSaver.test_save_resized_content_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_segmentation_saver.py_TestHandlerSegmentationSaver.test_save_resized_content_", "embedding": null, "metadata": {"file_path": "tests/test_handler_segmentation_saver.py", "file_name": "test_handler_segmentation_saver.py", "file_type": "text/x-python", "category": "test", "start_line": 49, "end_line": 79, "span_ids": ["impl:5", "TestHandlerSegmentationSaver.test_save_resized_content"], "tokens": 307}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestHandlerSegmentationSaver(unittest.TestCase):\n\n @parameterized.expand([TEST_CASE_0, TEST_CASE_1])\n def test_save_resized_content(self, output_ext):\n with tempfile.TemporaryDirectory() as tempdir:\n\n # set up engine\n def _train_func(engine, batch):\n return torch.randint(0, 255, (8, 1, 2, 2)).float()\n\n engine = Engine(_train_func)\n\n # set up testing handler\n saver = SegmentationSaver(output_dir=tempdir, output_postfix=\"seg\", output_ext=output_ext, scale=255)\n saver.attach(engine)\n\n data = [\n {\n \"filename_or_obj\": [\"testfile\" + str(i) + \".nii.gz\" for i in range(8)],\n \"spatial_shape\": [(28, 28)] * 8,\n \"affine\": [np.diag(np.ones(4)) * 5] * 8,\n \"original_affine\": [np.diag(np.ones(4)) * 1.0] * 8,\n }\n ]\n engine.run(data, max_epochs=1)\n for i in range(8):\n filepath = os.path.join(\"testfile\" + str(i), \"testfile\" + str(i) + \"_seg\" + output_ext)\n self.assertTrue(os.path.exists(os.path.join(tempdir, filepath)))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_stats.py_TestHandlerStats.test_loss_print_TestHandlerStats.test_loss_print.for_idx_line_in_enumerat.if_grep_match_line_.if_idx_in_1_2_3_6_7_.self_assertTrue_has_key_w": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_stats.py_TestHandlerStats.test_loss_print_TestHandlerStats.test_loss_print.for_idx_line_in_enumerat.if_grep_match_line_.if_idx_in_1_2_3_6_7_.self_assertTrue_has_key_w", "embedding": null, "metadata": {"file_path": "tests/test_handler_stats.py", "file_name": "test_handler_stats.py", "file_type": "text/x-python", "category": "test", "start_line": 61, "end_line": 88, "span_ids": ["TestHandlerStats.test_loss_print"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestHandlerStats(unittest.TestCase):\n\n def test_loss_print(self):\n log_stream = StringIO()\n log_handler = logging.StreamHandler(log_stream)\n log_handler.setLevel(logging.INFO)\n key_to_handler = \"test_logging\"\n key_to_print = \"myLoss\"\n\n # set up engine\n def _train_func(engine, batch):\n return torch.tensor(0.0)\n\n engine = Engine(_train_func)\n\n # set up testing handler\n stats_handler = StatsHandler(name=key_to_handler, tag_name=key_to_print, logger_handler=log_handler)\n stats_handler.attach(engine)\n\n engine.run(range(3), max_epochs=2)\n\n # check logging output\n output_str = log_stream.getvalue()\n log_handler.close()\n grep = re.compile(f\".*{key_to_handler}.*\")\n has_key_word = re.compile(f\".*{key_to_print}.*\")\n for idx, line in enumerate(output_str.split(\"\\n\")):\n if 
grep.match(line):\n if idx in [1, 2, 3, 6, 7, 8]:\n self.assertTrue(has_key_word.match(line))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_stats.py_TestHandlerStats.test_loss_dict_TestHandlerStats.test_loss_dict.for_idx_line_in_enumerat.if_grep_match_line_.if_idx_in_1_2_3_6_7_.self_assertTrue_has_key_w": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_stats.py_TestHandlerStats.test_loss_dict_TestHandlerStats.test_loss_dict.for_idx_line_in_enumerat.if_grep_match_line_.if_idx_in_1_2_3_6_7_.self_assertTrue_has_key_w", "embedding": null, "metadata": {"file_path": "tests/test_handler_stats.py", "file_name": "test_handler_stats.py", "file_type": "text/x-python", "category": "test", "start_line": 90, "end_line": 119, "span_ids": ["TestHandlerStats.test_loss_dict"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestHandlerStats(unittest.TestCase):\n\n def test_loss_dict(self):\n log_stream = StringIO()\n log_handler = logging.StreamHandler(log_stream)\n log_handler.setLevel(logging.INFO)\n key_to_handler = \"test_logging\"\n key_to_print = \"myLoss1\"\n\n # set up engine\n def _train_func(engine, batch):\n return torch.tensor(0.0)\n\n engine = Engine(_train_func)\n\n # set up testing handler\n stats_handler = StatsHandler(\n name=key_to_handler, output_transform=lambda x: {key_to_print: x}, logger_handler=log_handler\n )\n stats_handler.attach(engine)\n\n engine.run(range(3), max_epochs=2)\n\n # check logging output\n output_str = log_stream.getvalue()\n log_handler.close()\n grep = re.compile(f\".*{key_to_handler}.*\")\n has_key_word = re.compile(f\".*{key_to_print}.*\")\n for idx, line in enumerate(output_str.split(\"\\n\")):\n if grep.match(line):\n if idx in [1, 2, 3, 6, 7, 8]:\n self.assertTrue(has_key_word.match(line))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_stats.py_TestHandlerStats.test_loss_file_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_stats.py_TestHandlerStats.test_loss_file_", "embedding": null, "metadata": {"file_path": "tests/test_handler_stats.py", "file_name": "test_handler_stats.py", "file_type": "text/x-python", "category": "test", "start_line": 121, "end_line": 169, "span_ids": ["TestHandlerStats.test_loss_file", "impl", "TestHandlerStats.test_exception"], "tokens": 355}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "class TestHandlerStats(unittest.TestCase):\n\n def test_loss_file(self):\n key_to_handler = \"test_logging\"\n key_to_print = \"myLoss\"\n\n with tempfile.TemporaryDirectory() as tempdir:\n filename = os.path.join(tempdir, \"test_loss_stats.log\")\n handler = logging.FileHandler(filename, mode=\"w\")\n handler.setLevel(logging.INFO)\n\n # set up engine\n def _train_func(engine, batch):\n return torch.tensor(0.0)\n\n engine = Engine(_train_func)\n\n # set up testing handler\n stats_handler = StatsHandler(name=key_to_handler, tag_name=key_to_print, logger_handler=handler)\n stats_handler.attach(engine)\n\n engine.run(range(3), max_epochs=2)\n handler.close()\n stats_handler.logger.removeHandler(handler)\n with open(filename, \"r\") as f:\n output_str = f.read()\n grep = re.compile(f\".*{key_to_handler}.*\")\n has_key_word = re.compile(f\".*{key_to_print}.*\")\n for idx, line in enumerate(output_str.split(\"\\n\")):\n if grep.match(line):\n if idx in [1, 2, 3, 6, 7, 8]:\n self.assertTrue(has_key_word.match(line))\n\n def test_exception(self):\n # set up engine\n def _train_func(engine, batch):\n raise RuntimeError(\"test exception.\")\n\n engine = Engine(_train_func)\n\n # set up testing handler\n stats_handler = StatsHandler()\n stats_handler.attach(engine)\n\n with self.assertRaises(RuntimeError):\n engine.run(range(3), max_epochs=2)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_tb_image.py_glob_TEST_CASES._20_20_2_20_20_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_tb_image.py_glob_TEST_CASES._20_20_2_20_20_", "embedding": null, "metadata": {"file_path": "tests/test_handler_tb_image.py", "file_name": "test_handler_tb_image.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 23, "span_ids": ["docstring"], "tokens": 107}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import glob\nimport tempfile\nimport unittest\n\nimport numpy as np\nimport torch\nfrom ignite.engine import Engine, Events\nfrom parameterized import parameterized\n\nfrom monai.handlers import TensorBoardImageHandler\n\nTEST_CASES = [[[20, 20]], [[2, 20, 20]], [[3, 20, 20]], [[20, 20, 20]], [[2, 20, 20, 20]], [[2, 2, 20, 20, 20]]]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_tb_image.py_TestHandlerTBImage_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_tb_image.py_TestHandlerTBImage_", "embedding": null, "metadata": {"file_path": "tests/test_handler_tb_image.py", "file_name": "test_handler_tb_image.py", 
"file_type": "text/x-python", "category": "test", "start_line": 26, "end_line": 50, "span_ids": ["TestHandlerTBImage.test_tb_image_shape", "impl:3", "TestHandlerTBImage"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestHandlerTBImage(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_tb_image_shape(self, shape):\n with tempfile.TemporaryDirectory() as tempdir:\n\n # set up engine\n def _train_func(engine, batch):\n return torch.zeros((1, 1, 10, 10))\n\n engine = Engine(_train_func)\n\n # set up testing handler\n stats_handler = TensorBoardImageHandler(log_dir=tempdir)\n engine.add_event_handler(Events.ITERATION_COMPLETED, stats_handler)\n\n data = zip(np.random.normal(size=(10, 4, *shape)), np.random.normal(size=(10, 4, *shape)))\n engine.run(data, epoch_length=10, max_epochs=1)\n stats_handler.close()\n\n self.assertTrue(len(glob.glob(tempdir)) > 0)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_tb_stats.py_TestHandlerTBStats.test_metrics_writer_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_tb_stats.py_TestHandlerTBStats.test_metrics_writer_", "embedding": null, "metadata": {"file_path": "tests/test_handler_tb_stats.py", "file_name": "test_handler_tb_stats.py", "file_type": "text/x-python", "category": "test", "start_line": 46, "end_line": 75, "span_ids": ["TestHandlerTBStats.test_metrics_writer", "impl"], "tokens": 227}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestHandlerTBStats(unittest.TestCase):\n\n def test_metrics_writer(self):\n with tempfile.TemporaryDirectory() as tempdir:\n\n # set up engine\n def _train_func(engine, batch):\n return batch + 1.0\n\n engine = Engine(_train_func)\n\n # set up dummy metric\n @engine.on(Events.EPOCH_COMPLETED)\n def _update_metric(engine):\n current_metric = engine.state.metrics.get(\"acc\", 0.1)\n engine.state.metrics[\"acc\"] = current_metric + 0.1\n\n # set up testing handler\n writer = SummaryWriter(log_dir=tempdir)\n stats_handler = TensorBoardStatsHandler(\n writer, output_transform=lambda x: {\"loss\": x * 2.0}, global_epoch_transform=lambda x: x * 3.0\n )\n stats_handler.attach(engine)\n engine.run(range(3), max_epochs=2)\n writer.close()\n # check logging output\n self.assertTrue(len(glob.glob(tempdir)) > 0)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_validation.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_validation.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_handler_validation.py", "file_name": "test_handler_validation.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 50, "span_ids": ["TestEvaluator", "TestHandlerValidation.test_content", "TestEvaluator._iteration", "impl", "docstring", "TestHandlerValidation"], "tokens": 207}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom ignite.engine import Engine\n\nfrom monai.data import Dataset\nfrom monai.engines import Evaluator\nfrom monai.handlers import ValidationHandler\n\n\nclass TestEvaluator(Evaluator):\n def _iteration(self, engine, batchdata):\n pass\n\n\nclass TestHandlerValidation(unittest.TestCase):\n def test_content(self):\n data = [0] * 8\n\n # set up engine\n def _train_func(engine, batch):\n pass\n\n engine = Engine(_train_func)\n\n # set up testing handler\n val_data_loader = torch.utils.data.DataLoader(Dataset(data))\n evaluator = TestEvaluator(torch.device(\"cpu:0\"), val_data_loader)\n saver = ValidationHandler(interval=2, validator=evaluator)\n saver.attach(engine)\n\n engine.run(data, max_epochs=5)\n self.assertEqual(evaluator.state.max_epochs, 4)\n self.assertEqual(evaluator.state.epoch_length, 8)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_header_correct.py_unittest_TestCorrection.test_correct.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_header_correct.py_unittest_TestCorrection.test_correct.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_header_correct.py", "file_name": "test_header_correct.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 28, "span_ids": ["TestCorrection", "TestCorrection.test_correct", "docstring"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport nibabel as nib\nimport numpy as np\n\nfrom monai.data import correct_nifti_header_if_necessary\n\n\nclass TestCorrection(unittest.TestCase):\n def test_correct(self):\n test_img = nib.Nifti1Image(np.zeros((1, 2, 3)), np.eye(4))\n test_img.header.set_zooms((100, 100, 100))\n test_img = correct_nifti_header_if_necessary(test_img)\n np.testing.assert_allclose(\n test_img.affine,\n np.array([[100.0, 0.0, 0.0, 0.0], 
[0.0, 100.0, 0.0, 0.0], [0.0, 0.0, 100.0, 0.0], [0.0, 0.0, 0.0, 1.0]]),\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_header_correct.py_TestCorrection.test_affine_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_header_correct.py_TestCorrection.test_affine_", "embedding": null, "metadata": {"file_path": "tests/test_header_correct.py", "file_name": "test_header_correct.py", "file_type": "text/x-python", "category": "test", "start_line": 30, "end_line": 41, "span_ids": ["TestCorrection.test_affine", "impl"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCorrection(unittest.TestCase):\n\n def test_affine(self):\n test_img = nib.Nifti1Image(np.zeros((1, 2, 3)), np.eye(4) * 20.0)\n test_img = correct_nifti_header_if_necessary(test_img)\n np.testing.assert_allclose(\n test_img.affine,\n np.array([[20.0, 0.0, 0.0, 0.0], [0.0, 20.0, 0.0, 0.0], [0.0, 0.0, 20.0, 0.0], [0.0, 0.0, 0.0, 20.0]]),\n )\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_identity.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_identity.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_identity.py", "file_name": "test_identity.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 29, "span_ids": ["TestIdentity", "TestIdentity.test_identity", "impl", "docstring"], "tokens": 79}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms.utility.array import Identity\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestIdentity(NumpyImageTestCase2D):\n def test_identity(self):\n img = self.imt\n identity = Identity()\n self.assertTrue(np.allclose(img, identity(img)))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_identityd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_identityd.py_unittest_", 
"embedding": null, "metadata": {"file_path": "tests/test_identityd.py", "file_name": "test_identityd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 29, "span_ids": ["TestIdentityd.test_identityd", "TestIdentityd", "impl", "docstring"], "tokens": 90}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nfrom monai.transforms.utility.dictionary import Identityd\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestIdentityd(NumpyImageTestCase2D):\n def test_identityd(self):\n img = self.imt\n data = {}\n data[\"img\"] = img\n identity = Identityd(keys=data.keys())\n self.assertEqual(data, identity(data))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_classification_2d.py_os_MedNISTDataset.__getitem__.return.self_transforms_self_imag": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_classification_2d.py_os_MedNISTDataset.__getitem__.return.self_transforms_self_imag", "embedding": null, "metadata": {"file_path": "tests/test_integration_classification_2d.py", "file_name": "test_integration_classification_2d.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 57, "span_ids": ["MedNISTDataset", "MedNISTDataset.__len__", "MedNISTDataset.__init__", "docstring", "MedNISTDataset.__getitem__"], "tokens": 311}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport unittest\nimport warnings\nfrom urllib.error import ContentTooShortError, HTTPError\n\nimport numpy as np\nimport torch\nfrom torch.utils.data import DataLoader\n\nimport monai\nfrom monai.apps import download_and_extract\nfrom monai.metrics import compute_roc_auc\nfrom monai.networks import eval_mode\nfrom monai.networks.nets import DenseNet121\nfrom monai.transforms import (\n Activations,\n AddChannel,\n AsDiscrete,\n Compose,\n LoadImage,\n RandFlip,\n RandRotate,\n RandZoom,\n ScaleIntensity,\n ToTensor,\n)\nfrom monai.utils import set_determinism\nfrom tests.testing_data.integration_answers import test_integration_value\nfrom tests.utils import DistTestCase, TimedCall, skip_if_quick\n\nTEST_DATA_URL = \"https://www.dropbox.com/s/5wwskxctvcxiuea/MedNIST.tar.gz?dl=1\"\nMD5_VALUE = \"0bc7306e7427e00ad1c5526a6677552d\"\nTASK = \"integration_classification_2d\"\n\n\nclass MedNISTDataset(torch.utils.data.Dataset):\n def __init__(self, image_files, labels, transforms):\n self.image_files = image_files\n self.labels = labels\n self.transforms = transforms\n\n def __len__(self):\n return len(self.image_files)\n\n def __getitem__(self, index):\n return 
self.transforms(self.image_files[index]), self.labels[index]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_classification_2d.py_run_inference_test_run_inference_test.return.tps": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_classification_2d.py_run_inference_test_run_inference_test.return.tps", "embedding": null, "metadata": {"file_path": "tests/test_integration_classification_2d.py", "file_name": "test_integration_classification_2d.py", "file_type": "text/x-python", "category": "test", "start_line": 148, "end_line": 168, "span_ids": ["run_inference_test"], "tokens": 272}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def run_inference_test(root_dir, test_x, test_y, device=\"cuda:0\", num_workers=10):\n # define transforms for image and classification\n val_transforms = Compose([LoadImage(image_only=True), AddChannel(), ScaleIntensity(), ToTensor()])\n val_ds = MedNISTDataset(test_x, test_y, val_transforms)\n val_loader = DataLoader(val_ds, batch_size=300, num_workers=num_workers)\n\n model = DenseNet121(spatial_dims=2, in_channels=1, out_channels=len(np.unique(test_y))).to(device)\n\n model_filename = os.path.join(root_dir, \"best_metric_model.pth\")\n model.load_state_dict(torch.load(model_filename))\n y_true = []\n y_pred = []\n with eval_mode(model):\n for test_data in val_loader:\n test_images, test_labels = test_data[0].to(device), test_data[1].to(device)\n pred = model(test_images).argmax(dim=1)\n for i in range(len(pred)):\n y_true.append(test_labels[i].item())\n y_pred.append(pred[i].item())\n tps = [np.sum((np.asarray(y_true) == idx) & (np.asarray(y_pred) == idx)) for idx in np.unique(test_y)]\n return tps", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_classification_2d.py_IntegrationClassification2D.tearDown_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_classification_2d.py_IntegrationClassification2D.tearDown_", "embedding": null, "metadata": {"file_path": "tests/test_integration_classification_2d.py", "file_name": "test_integration_classification_2d.py", "file_type": "text/x-python", "category": "test", "start_line": 220, "end_line": 270, "span_ids": ["IntegrationClassification2D.tearDown", "IntegrationClassification2D.test_training", "impl:7", "IntegrationClassification2D.test_timing", "IntegrationClassification2D.train_and_infer"], "tokens": 471}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@skip_if_quick\nclass IntegrationClassification2D(DistTestCase):\n\n def tearDown(self):\n set_determinism(seed=None)\n try:\n os.remove(os.path.join(self.data_dir, \"best_metric_model.pth\"))\n except FileNotFoundError:\n warnings.warn(\"not found best_metric_model.pth, training skipped?\")\n pass\n\n def train_and_infer(self, idx=0):\n results = []\n if not os.path.exists(os.path.join(self.data_dir, \"MedNIST\")):\n # skip test if no MedNIST dataset\n return results\n\n set_determinism(seed=0)\n losses, best_metric, best_metric_epoch = run_training_test(\n self.data_dir, self.train_x, self.train_y, self.val_x, self.val_y, device=self.device\n )\n infer_metric = run_inference_test(self.data_dir, self.test_x, self.test_y, device=self.device)\n\n print(f\"integration_classification_2d {losses}\")\n print(\"best metric\", best_metric)\n print(\"infer metric\", infer_metric)\n # check training properties\n self.assertTrue(test_integration_value(TASK, key=\"losses\", data=losses, rtol=1e-2))\n self.assertTrue(test_integration_value(TASK, key=\"best_metric\", data=best_metric, rtol=1e-4))\n np.testing.assert_allclose(best_metric_epoch, 4)\n model_file = os.path.join(self.data_dir, \"best_metric_model.pth\")\n self.assertTrue(os.path.exists(model_file))\n # check inference properties\n self.assertTrue(test_integration_value(TASK, key=\"infer_prop\", data=np.asarray(infer_metric), rtol=1))\n results.extend(losses)\n results.append(best_metric)\n results.extend(infer_metric)\n return results\n\n def test_training(self):\n repeated = []\n for i in range(2):\n results = self.train_and_infer(i)\n repeated.append(results)\n np.testing.assert_allclose(repeated[0], repeated[1])\n\n @TimedCall(seconds=1000, skip_timing=not torch.cuda.is_available(), daemon=False)\n def test_timing(self):\n self.train_and_infer()\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_determinism.py_unittest_run_test._TestBatch.__len__.return.train_steps": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_determinism.py_unittest_run_test._TestBatch.__len__.return.train_steps", "embedding": null, "metadata": {"file_path": "tests/test_integration_determinism.py", "file_name": "test_integration_determinism.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 41, "span_ids": ["run_test", "docstring"], "tokens": 253}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom torch.utils.data import DataLoader, Dataset\n\nfrom monai.data import create_test_image_2d\nfrom monai.losses import DiceLoss\nfrom monai.networks.nets import UNet\nfrom monai.transforms import AddChannel, Compose, RandRotate90, RandSpatialCrop, ScaleIntensity, ToTensor\nfrom monai.utils import 
set_determinism\nfrom tests.utils import DistTestCase, TimedCall\n\n\ndef run_test(batch_size=64, train_steps=200, device=\"cuda:0\"):\n class _TestBatch(Dataset):\n def __init__(self, transforms):\n self.transforms = transforms\n\n def __getitem__(self, _unused_id):\n im, seg = create_test_image_2d(128, 128, noise_max=1, num_objs=4, num_seg_classes=1)\n seed = np.random.randint(2147483647)\n self.transforms.set_random_state(seed=seed)\n im = self.transforms(im)\n self.transforms.set_random_state(seed=seed)\n seg = self.transforms(seg)\n return im, seg\n\n def __len__(self):\n return train_steps\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_determinism.py_run_test.net_run_test.return.epoch_loss_step": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_determinism.py_run_test.net_run_test.return.epoch_loss_step", "embedding": null, "metadata": {"file_path": "tests/test_integration_determinism.py", "file_name": "test_integration_determinism.py", "file_type": "text/x-python", "category": "test", "start_line": 43, "end_line": 68, "span_ids": ["run_test"], "tokens": 245}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def run_test(batch_size=64, train_steps=200, device=\"cuda:0\"):\n # ... 
other code\n\n net = UNet(\n dimensions=2, in_channels=1, out_channels=1, channels=(4, 8, 16, 32), strides=(2, 2, 2), num_res_units=2\n ).to(device)\n\n loss = DiceLoss(sigmoid=True)\n opt = torch.optim.Adam(net.parameters(), 1e-2)\n train_transforms = Compose(\n [AddChannel(), ScaleIntensity(), RandSpatialCrop((96, 96), random_size=False), RandRotate90(), ToTensor()]\n )\n\n src = DataLoader(_TestBatch(train_transforms), batch_size=batch_size, shuffle=True)\n\n net.train()\n epoch_loss = 0\n step = 0\n for img, seg in src:\n step += 1\n opt.zero_grad()\n output = net(img.to(device))\n step_loss = loss(output, seg.to(device))\n step_loss.backward()\n opt.step()\n epoch_loss += step_loss.item()\n epoch_loss /= step\n\n return epoch_loss, step", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_determinism.py_TestDeterminism_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_determinism.py_TestDeterminism_", "embedding": null, "metadata": {"file_path": "tests/test_integration_determinism.py", "file_name": "test_integration_determinism.py", "file_type": "text/x-python", "category": "test", "start_line": 71, "end_line": 89, "span_ids": ["TestDeterminism.tearDown", "impl", "TestDeterminism.setUp", "TestDeterminism", "TestDeterminism.test_training"], "tokens": 145}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDeterminism(DistTestCase):\n def setUp(self):\n self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu:0\")\n\n def tearDown(self):\n set_determinism(seed=None)\n\n @TimedCall(seconds=30)\n def test_training(self):\n set_determinism(seed=0)\n loss, step = run_test(device=self.device)\n print(f\"Deterministic loss {loss} at training step {step}\")\n np.testing.assert_allclose(step, 4)\n np.testing.assert_allclose(loss, 0.535927, rtol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_segmentation_3d.py_run_inference_test_run_inference_test.return.metric": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_segmentation_3d.py_run_inference_test_run_inference_test.return.metric", "embedding": null, "metadata": {"file_path": "tests/test_integration_segmentation_3d.py", "file_name": "test_integration_segmentation_3d.py", "file_type": "text/x-python", "category": "test", "start_line": 176, "end_line": 226, "span_ids": ["run_inference_test"], "tokens": 640}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def run_inference_test(root_dir, device=\"cuda:0\"):\n images = sorted(glob(os.path.join(root_dir, \"im*.nii.gz\")))\n segs = sorted(glob(os.path.join(root_dir, \"seg*.nii.gz\")))\n val_files = [{\"img\": img, \"seg\": seg} for img, seg in zip(images, segs)]\n\n # define transforms for image and segmentation\n val_transforms = Compose(\n [\n LoadImaged(keys=[\"img\", \"seg\"]),\n AsChannelFirstd(keys=[\"img\", \"seg\"], channel_dim=-1),\n # resampling with align_corners=True or dtype=float64 will generate\n # slight different results between PyTorch 1.5 an 1.6\n Spacingd(keys=[\"img\", \"seg\"], pixdim=[1.2, 0.8, 0.7], mode=[\"bilinear\", \"nearest\"], dtype=np.float32),\n ScaleIntensityd(keys=\"img\"),\n ToTensord(keys=[\"img\", \"seg\"]),\n ]\n )\n val_ds = monai.data.Dataset(data=val_files, transform=val_transforms)\n # sliding window inference need to input 1 image in every iteration\n val_loader = monai.data.DataLoader(val_ds, batch_size=1, num_workers=4)\n val_post_tran = Compose([Activations(sigmoid=True), AsDiscrete(threshold_values=True)])\n dice_metric = DiceMetric(include_background=True, reduction=\"mean\")\n\n model = UNet(\n dimensions=3,\n in_channels=1,\n out_channels=1,\n channels=(16, 32, 64, 128, 256),\n strides=(2, 2, 2, 2),\n num_res_units=2,\n ).to(device)\n\n model_filename = os.path.join(root_dir, \"best_metric_model.pth\")\n model.load_state_dict(torch.load(model_filename))\n with eval_mode(model):\n metric_sum = 0.0\n metric_count = 0\n # resampling with align_corners=True or dtype=float64 will generate\n # slight different results between PyTorch 1.5 an 1.6\n saver = NiftiSaver(output_dir=os.path.join(root_dir, \"output\"), dtype=np.float32)\n for val_data in val_loader:\n val_images, val_labels = val_data[\"img\"].to(device), val_data[\"seg\"].to(device)\n # define sliding window size and batch size for windows inference\n sw_batch_size, roi_size = 4, (96, 96, 96)\n val_outputs = val_post_tran(sliding_window_inference(val_images, roi_size, sw_batch_size, model))\n value, not_nans = dice_metric(y_pred=val_outputs, y=val_labels)\n metric_count += not_nans.item()\n metric_sum += value.item() * not_nans.item()\n saver.save_batch(val_outputs, val_data[\"img_meta_dict\"])\n metric = metric_sum / metric_count\n return metric", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_segmentation_3d.py_IntegrationSegmentation3D_IntegrationSegmentation3D.tearDown.shutil_rmtree_self_data_d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_segmentation_3d.py_IntegrationSegmentation3D_IntegrationSegmentation3D.tearDown.shutil_rmtree_self_data_d", "embedding": null, "metadata": {"file_path": "tests/test_integration_segmentation_3d.py", "file_name": "test_integration_segmentation_3d.py", "file_type": "text/x-python", "category": "test", "start_line": 229, "end_line": 246, "span_ids": ["IntegrationSegmentation3D", "IntegrationSegmentation3D.tearDown", "IntegrationSegmentation3D.setUp"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@skip_if_quick\nclass IntegrationSegmentation3D(DistTestCase):\n def setUp(self):\n set_determinism(seed=0)\n\n self.data_dir = tempfile.mkdtemp()\n for i in range(40):\n im, seg = create_test_image_3d(128, 128, 128, num_seg_classes=1, channel_dim=-1)\n n = nib.Nifti1Image(im, np.eye(4))\n nib.save(n, os.path.join(self.data_dir, f\"img{i:d}.nii.gz\"))\n n = nib.Nifti1Image(seg, np.eye(4))\n nib.save(n, os.path.join(self.data_dir, f\"seg{i:d}.nii.gz\"))\n\n self.device = \"cuda:0\" if torch.cuda.is_available() else \"cpu:0\"\n\n def tearDown(self):\n set_determinism(seed=None)\n shutil.rmtree(self.data_dir)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_sliding_window.py_run_test_run_test.return.saved_name": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_sliding_window.py_run_test_run_test.return.saved_name", "embedding": null, "metadata": {"file_path": "tests/test_integration_sliding_window.py", "file_name": "test_integration_sliding_window.py", "file_type": "text/x-python", "category": "test", "start_line": 32, "end_line": 58, "span_ids": ["run_test"], "tokens": 301}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def run_test(batch_size, img_name, seg_name, output_dir, device=\"cuda:0\"):\n ds = ImageDataset([img_name], [seg_name], transform=AddChannel(), seg_transform=AddChannel(), image_only=False)\n loader = DataLoader(ds, batch_size=1, pin_memory=torch.cuda.is_available())\n\n net = UNet(\n dimensions=3, in_channels=1, out_channels=1, channels=(4, 8, 16, 32), strides=(2, 2, 2), num_res_units=2\n ).to(device)\n roi_size = (16, 32, 48)\n sw_batch_size = batch_size\n\n def _sliding_window_processor(_engine, batch):\n img, seg, meta_data = batch\n with eval_mode(net):\n seg_probs = sliding_window_inference(img.to(device), roi_size, sw_batch_size, net, device=device)\n return predict_segmentation(seg_probs)\n\n infer_engine = Engine(_sliding_window_processor)\n\n SegmentationSaver(\n output_dir=output_dir, output_ext=\".nii.gz\", output_postfix=\"seg\", batch_transform=lambda x: x[2]\n ).attach(infer_engine)\n\n infer_engine.run(loader)\n\n basename = os.path.basename(img_name)[: -len(\".nii.gz\")]\n saved_name = os.path.join(output_dir, basename, f\"{basename}_seg.nii.gz\")\n return saved_name", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_stn.py_from___future___import_pr_STNBenchmark.forward.return.self_stn_x_": {"__data__": 
{"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_stn.py_from___future___import_pr_STNBenchmark.forward.return.self_stn_x_", "embedding": null, "metadata": {"file_path": "tests/test_integration_stn.py", "file_name": "test_integration_stn.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 74, "span_ids": ["STNBenchmark.forward", "STNBenchmark.stn", "docstring", "STNBenchmark.__init__", "STNBenchmark", "STNBenchmark.stn_ref"], "tokens": 554}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from __future__ import print_function\n\nimport unittest\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\n\nfrom monai.data import create_test_image_2d\nfrom monai.networks.layers import AffineTransform\nfrom monai.utils import set_determinism\nfrom tests.utils import DistTestCase, TimedCall\n\n\nclass STNBenchmark(nn.Module):\n \"\"\"\n adapted from https://pytorch.org/tutorials/intermediate/spatial_transformer_tutorial.html\n \"\"\"\n\n def __init__(self, is_ref=True, reverse_indexing=False):\n super().__init__()\n self.is_ref = is_ref\n self.localization = nn.Sequential(\n nn.Conv2d(1, 8, kernel_size=7),\n nn.MaxPool2d(2, stride=2),\n nn.ReLU(True),\n nn.Conv2d(8, 10, kernel_size=5),\n nn.MaxPool2d(2, stride=2),\n nn.ReLU(True),\n )\n # Regressor for the 3 * 2 affine matrix\n self.fc_loc = nn.Sequential(nn.Linear(10 * 3 * 3, 32), nn.ReLU(True), nn.Linear(32, 3 * 2))\n # Initialize the weights/bias with identity transformation\n self.fc_loc[2].weight.data.zero_()\n self.fc_loc[2].bias.data.copy_(torch.tensor([1, 0, 0, 0, 1, 0], dtype=torch.float))\n if not self.is_ref:\n self.xform = AffineTransform(normalized=True, reverse_indexing=reverse_indexing)\n\n # Spatial transformer network forward function\n def stn_ref(self, x):\n xs = self.localization(x)\n xs = xs.view(-1, 10 * 3 * 3)\n theta = self.fc_loc(xs)\n theta = theta.view(-1, 2, 3)\n\n grid = F.affine_grid(theta, x.size(), align_corners=False)\n x = F.grid_sample(x, grid, align_corners=False)\n return x\n\n def stn(self, x):\n xs = self.localization(x)\n xs = xs.view(-1, 10 * 3 * 3)\n theta = self.fc_loc(xs)\n theta = theta.view(-1, 2, 3)\n x = self.xform(x, theta, spatial_size=x.size()[2:])\n return x\n\n def forward(self, x):\n if self.is_ref:\n return self.stn_ref(x)\n return self.stn(x)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_stn.py_compare_2d_compare_2d.return.model_img_a_detach_cpu": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_stn.py_compare_2d_compare_2d.return.model_img_a_detach_cpu", "embedding": null, "metadata": {"file_path": "tests/test_integration_stn.py", "file_name": "test_integration_stn.py", "file_type": "text/x-python", "category": "test", "start_line": 76, "end_line": 96, "span_ids": ["compare_2d"], "tokens": 
281}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def compare_2d(is_ref=True, device=None, reverse_indexing=False):\n batch_size = 32\n img_a = [create_test_image_2d(28, 28, 5, rad_max=6, noise_max=1)[0][None] for _ in range(batch_size)]\n img_b = [create_test_image_2d(28, 28, 5, rad_max=6, noise_max=1)[0][None] for _ in range(batch_size)]\n img_a = np.stack(img_a, axis=0)\n img_b = np.stack(img_b, axis=0)\n img_a = torch.as_tensor(img_a, device=device)\n img_b = torch.as_tensor(img_b, device=device)\n model = STNBenchmark(is_ref=is_ref, reverse_indexing=reverse_indexing).to(device)\n optimizer = optim.SGD(model.parameters(), lr=0.001)\n model.train()\n init_loss = None\n for _ in range(20):\n optimizer.zero_grad()\n output_a = model(img_a)\n loss = torch.mean((output_a - img_b) ** 2)\n if init_loss is None:\n init_loss = loss.item()\n loss.backward()\n optimizer.step()\n return model(img_a).detach().cpu().numpy(), loss.item(), init_loss", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_stn.py_TestSpatialTransformerCore_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_stn.py_TestSpatialTransformerCore_", "embedding": null, "metadata": {"file_path": "tests/test_integration_stn.py", "file_name": "test_integration_stn.py", "file_type": "text/x-python", "category": "test", "start_line": 100, "end_line": 134, "span_ids": ["TestSpatialTransformerCore", "TestSpatialTransformerCore.tearDown", "impl", "TestSpatialTransformerCore.test_training", "TestSpatialTransformerCore.setUp"], "tokens": 317}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSpatialTransformerCore(DistTestCase):\n def setUp(self):\n self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu:0\")\n\n def tearDown(self):\n set_determinism(seed=None)\n\n @TimedCall(seconds=60)\n def test_training(self):\n \"\"\"\n check that the quality AffineTransform backpropagation\n \"\"\"\n atol = 1e-5\n set_determinism(seed=0)\n out_ref, loss_ref, init_loss_ref = compare_2d(True, self.device)\n print(out_ref.shape, loss_ref, init_loss_ref)\n\n set_determinism(seed=0)\n out, loss, init_loss = compare_2d(False, self.device)\n print(out.shape, loss, init_loss)\n np.testing.assert_allclose(out_ref, out, atol=atol)\n np.testing.assert_allclose(init_loss_ref, init_loss, atol=atol)\n np.testing.assert_allclose(loss_ref, loss, atol=atol)\n\n set_determinism(seed=0)\n out, loss, init_loss = compare_2d(False, self.device, True)\n print(out.shape, loss, init_loss)\n np.testing.assert_allclose(out_ref, out, atol=atol)\n np.testing.assert_allclose(init_loss_ref, init_loss, atol=atol)\n 
np.testing.assert_allclose(loss_ref, loss, atol=atol)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_run_inference_test_run_inference_test.return.evaluator_state_best_metr": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_run_inference_test_run_inference_test.return.evaluator_state_best_metr", "embedding": null, "metadata": {"file_path": "tests/test_integration_workflows.py", "file_name": "test_integration_workflows.py", "file_type": "text/x-python", "category": "test", "start_line": 206, "end_line": 267, "span_ids": ["run_inference_test"], "tokens": 567}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def run_inference_test(root_dir, model_file, device=\"cuda:0\", amp=False, num_workers=4):\n images = sorted(glob(os.path.join(root_dir, \"im*.nii.gz\")))\n segs = sorted(glob(os.path.join(root_dir, \"seg*.nii.gz\")))\n val_files = [{\"image\": img, \"label\": seg} for img, seg in zip(images, segs)]\n\n # define transforms for image and segmentation\n val_transforms = Compose(\n [\n LoadImaged(keys=[\"image\", \"label\"]),\n AsChannelFirstd(keys=[\"image\", \"label\"], channel_dim=-1),\n ScaleIntensityd(keys=[\"image\", \"label\"]),\n ToTensord(keys=[\"image\", \"label\"]),\n ]\n )\n\n # create a validation data loader\n val_ds = monai.data.Dataset(data=val_files, transform=val_transforms)\n val_loader = monai.data.DataLoader(val_ds, batch_size=1, num_workers=num_workers)\n\n # create UNet, DiceLoss and Adam optimizer\n net = monai.networks.nets.UNet(\n dimensions=3,\n in_channels=1,\n out_channels=1,\n channels=(16, 32, 64, 128, 256),\n strides=(2, 2, 2, 2),\n num_res_units=2,\n ).to(device)\n\n val_post_transforms = Compose(\n [\n Activationsd(keys=\"pred\", sigmoid=True),\n AsDiscreted(keys=\"pred\", threshold_values=True),\n KeepLargestConnectedComponentd(keys=\"pred\", applied_labels=[1]),\n ]\n )\n val_handlers = [\n StatsHandler(output_transform=lambda x: None),\n CheckpointLoader(load_path=f\"{model_file}\", load_dict={\"net\": net}),\n SegmentationSaver(\n output_dir=root_dir,\n batch_transform=lambda batch: batch[\"image_meta_dict\"],\n output_transform=lambda output: output[\"pred\"],\n ),\n ]\n\n evaluator = SupervisedEvaluator(\n device=device,\n val_data_loader=val_loader,\n network=net,\n inferer=SlidingWindowInferer(roi_size=(96, 96, 96), sw_batch_size=4, overlap=0.5),\n post_transform=val_post_transforms,\n key_val_metric={\n \"val_mean_dice\": MeanDice(include_background=True, output_transform=lambda x: (x[\"pred\"], x[\"label\"]))\n },\n additional_metrics={\"val_acc\": Accuracy(output_transform=lambda x: (x[\"pred\"], x[\"label\"]))},\n val_handlers=val_handlers,\n amp=True if amp else False,\n )\n evaluator.run()\n\n return evaluator.state.best_metric", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_IntegrationWorkflows_IntegrationWorkflows.tearDown.shutil_rmtree_self_data_d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_IntegrationWorkflows_IntegrationWorkflows.tearDown.shutil_rmtree_self_data_d", "embedding": null, "metadata": {"file_path": "tests/test_integration_workflows.py", "file_name": "test_integration_workflows.py", "file_type": "text/x-python", "category": "test", "start_line": 270, "end_line": 289, "span_ids": ["IntegrationWorkflows.tearDown", "IntegrationWorkflows.setUp", "IntegrationWorkflows"], "tokens": 213}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@skip_if_quick\nclass IntegrationWorkflows(DistTestCase):\n def setUp(self):\n set_determinism(seed=0)\n\n self.data_dir = tempfile.mkdtemp()\n for i in range(40):\n im, seg = create_test_image_3d(128, 128, 128, num_seg_classes=1, channel_dim=-1)\n n = nib.Nifti1Image(im, np.eye(4))\n nib.save(n, os.path.join(self.data_dir, f\"img{i:d}.nii.gz\"))\n n = nib.Nifti1Image(seg, np.eye(4))\n nib.save(n, os.path.join(self.data_dir, f\"seg{i:d}.nii.gz\"))\n\n self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu:0\")\n monai.config.print_config()\n logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n\n def tearDown(self):\n set_determinism(seed=None)\n shutil.rmtree(self.data_dir)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_IntegrationWorkflows.test_training_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows.py_IntegrationWorkflows.test_training_", "embedding": null, "metadata": {"file_path": "tests/test_integration_workflows.py", "file_name": "test_integration_workflows.py", "file_type": "text/x-python", "category": "test", "start_line": 331, "end_line": 351, "span_ids": ["impl:3", "IntegrationWorkflows.test_training", "IntegrationWorkflows.test_timing"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@skip_if_quick\nclass IntegrationWorkflows(DistTestCase):\n\n def test_training(self):\n repeated = []\n test_rounds = 3 if monai.utils.module.get_torch_version_tuple() >= (1, 6) else 2\n for i in range(test_rounds):\n results = self.train_and_infer(idx=i)\n repeated.append(results)\n np.testing.assert_allclose(repeated[0], repeated[1])\n\n @TimedCall(\n seconds=300,\n skip_timing=not 
torch.cuda.is_available(),\n daemon=False,\n )\n def test_timing(self):\n if monai.utils.module.get_torch_version_tuple() >= (1, 6):\n self.train_and_infer(idx=2)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_unittest_grid_2.torch_tensor_0_0_0_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_unittest_grid_2.torch_tensor_0_0_0_", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_component.py", "file_name": "test_keep_largest_connected_component.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 20, "span_ids": ["docstring"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import KeepLargestConnectedComponent\n\ngrid_1 = torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [1, 2, 1, 0, 0], [1, 2, 0, 1, 0], [2, 2, 0, 0, 2]]]])\ngrid_2 = torch.tensor([[[[0, 0, 0, 0, 1], [0, 0, 1, 1, 1], [1, 0, 1, 1, 2], [1, 0, 1, 2, 2], [0, 0, 0, 0, 1]]]])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_grid_3_grid_3.torch_tensor_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_grid_3_grid_3.torch_tensor_", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_component.py", "file_name": "test_keep_largest_connected_component.py", "file_type": "text/x-python", "category": "test", "start_line": 21, "end_line": 70, "span_ids": ["docstring"], "tokens": 824}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "grid_3 = torch.tensor(\n [\n [\n [\n [1.0, 1.0, 0.0, 1.0, 1.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [1.0, 0.0, 1.0, 0.0, 0.0],\n [1.0, 0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 1.0],\n ],\n ],\n [\n [\n [1.0, 1.0, 1.0, 1.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 
0.0, 0.0, 0.0],\n [1.0, 1.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [1.0, 0.0, 1.0, 1.0, 0.0],\n [1.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n ],\n ]\n)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_1_TEST_CASE_6._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_1_TEST_CASE_6._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_component.py", "file_name": "test_keep_largest_connected_component.py", "file_type": "text/x-python", "category": "test", "start_line": 73, "end_line": 113, "span_ids": ["impl:17", "impl:7"], "tokens": 710}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_1 = [\n \"value_1\",\n {\"independent\": False, \"applied_labels\": 1},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [0, 2, 1, 0, 0], [0, 2, 0, 1, 0], [2, 2, 0, 0, 2]]]]),\n]\n\nTEST_CASE_2 = [\n \"value_2\",\n {\"independent\": False, \"applied_labels\": [2]},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [1, 2, 1, 0, 0], [1, 2, 0, 1, 0], [2, 2, 0, 0, 0]]]]),\n]\n\nTEST_CASE_3 = [\n \"independent_value_1_2\",\n {\"independent\": True, \"applied_labels\": [1, 2]},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [0, 2, 1, 0, 0], [0, 2, 0, 1, 0], [2, 2, 0, 0, 0]]]]),\n]\n\nTEST_CASE_4 = [\n \"dependent_value_1_2\",\n {\"independent\": False, \"applied_labels\": [1, 2]},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [1, 2, 1, 0, 0], [1, 2, 0, 1, 0], [2, 2, 0, 0, 2]]]]),\n]\n\nTEST_CASE_5 = [\n \"value_1\",\n {\"independent\": True, \"applied_labels\": [1]},\n grid_2,\n torch.tensor([[[[0, 0, 0, 0, 1], [0, 0, 1, 1, 1], [0, 0, 1, 1, 2], [0, 0, 1, 2, 2], [0, 0, 0, 0, 0]]]]),\n]\n\nTEST_CASE_6 = [\n \"independent_value_1_2\",\n {\"independent\": True, \"applied_labels\": [1, 2]},\n grid_2,\n torch.tensor([[[[0, 0, 0, 0, 1], [0, 0, 1, 1, 1], [0, 0, 1, 1, 2], [0, 0, 1, 2, 2], [0, 0, 0, 0, 0]]]]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_7_TEST_CASE_10._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_7_TEST_CASE_10._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_component.py", "file_name": "test_keep_largest_connected_component.py", 
"file_type": "text/x-python", "category": "test", "start_line": 115, "end_line": 141, "span_ids": ["impl:17", "impl:25"], "tokens": 509}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_7 = [\n \"dependent_value_1_2\",\n {\"independent\": False, \"applied_labels\": [1, 2]},\n grid_2,\n torch.tensor([[[[0, 0, 0, 0, 1], [0, 0, 1, 1, 1], [0, 0, 1, 1, 2], [0, 0, 1, 2, 2], [0, 0, 0, 0, 1]]]]),\n]\n\nTEST_CASE_8 = [\n \"value_1_connect_1\",\n {\"independent\": False, \"applied_labels\": [1], \"connectivity\": 1},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [0, 2, 1, 0, 0], [0, 2, 0, 0, 0], [2, 2, 0, 0, 2]]]]),\n]\n\nTEST_CASE_9 = [\n \"independent_value_1_2_connect_1\",\n {\"independent\": True, \"applied_labels\": [1, 2], \"connectivity\": 1},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [0, 2, 1, 0, 0], [0, 2, 0, 0, 0], [2, 2, 0, 0, 0]]]]),\n]\n\nTEST_CASE_10 = [\n \"dependent_value_1_2_connect_1\",\n {\"independent\": False, \"applied_labels\": [1, 2], \"connectivity\": 1},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [1, 2, 1, 0, 0], [1, 2, 0, 0, 0], [2, 2, 0, 0, 0]]]]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_11_TEST_CASE_11._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_11_TEST_CASE_11._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_component.py", "file_name": "test_keep_largest_connected_component.py", "file_type": "text/x-python", "category": "test", "start_line": 143, "end_line": 197, "span_ids": ["impl:25"], "tokens": 873}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_11 = [\n \"onehot_independent_batch_2_apply_label_1_connect_1\",\n {\"independent\": True, \"applied_labels\": [1], \"connectivity\": 1},\n grid_3,\n torch.tensor(\n [\n [\n [\n [1.0, 1.0, 0.0, 1.0, 1.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 1.0],\n ],\n ],\n [\n [\n [1.0, 1.0, 1.0, 1.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 
0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n ],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_12_TEST_CASE_12._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_12_TEST_CASE_12._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_component.py", "file_name": "test_keep_largest_connected_component.py", "file_type": "text/x-python", "category": "test", "start_line": 199, "end_line": 253, "span_ids": ["impl:29"], "tokens": 873}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_12 = [\n \"onehot_independent_batch_2_apply_label_1_connect_2\",\n {\"independent\": True, \"applied_labels\": [1], \"connectivity\": 2},\n grid_3,\n torch.tensor(\n [\n [\n [\n [1.0, 1.0, 0.0, 1.0, 1.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 1.0],\n ],\n ],\n [\n [\n [1.0, 1.0, 1.0, 1.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n ],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_13_TEST_CASE_13._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_13_TEST_CASE_13._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_component.py", "file_name": "test_keep_largest_connected_component.py", "file_type": "text/x-python", "category": "test", "start_line": 255, "end_line": 309, "span_ids": ["impl:31"], "tokens": 878}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_13 = [\n \"onehot_independent_batch_2_apply_label_1_2_connect_2\",\n {\"independent\": True, \"applied_labels\": [1, 2], \"connectivity\": 2},\n grid_3,\n torch.tensor(\n [\n [\n [\n [1.0, 1.0, 0.0, 1.0, 1.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n ],\n ],\n [\n [\n [1.0, 1.0, 1.0, 1.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n ],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_14_TEST_CASE_14._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_14_TEST_CASE_14._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_component.py", "file_name": "test_keep_largest_connected_component.py", "file_type": "text/x-python", "category": "test", "start_line": 311, "end_line": 365, "span_ids": ["impl:33"], "tokens": 878}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_14 = [\n \"onehot_dependent_batch_2_apply_label_1_2_connect_2\",\n {\"independent\": False, \"applied_labels\": [1, 2], \"connectivity\": 2},\n grid_3,\n torch.tensor(\n [\n [\n [\n [1.0, 1.0, 0.0, 1.0, 1.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [1.0, 0.0, 1.0, 0.0, 0.0],\n [1.0, 0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 1.0],\n ],\n ],\n [\n [\n [1.0, 1.0, 1.0, 1.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 0.0, 0.0, 
0.0],\n ],\n ],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_15_TEST_CASE_15._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TEST_CASE_15_TEST_CASE_15._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_component.py", "file_name": "test_keep_largest_connected_component.py", "file_type": "text/x-python", "category": "test", "start_line": 367, "end_line": 421, "span_ids": ["impl:35"], "tokens": 878}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_15 = [\n \"onehot_dependent_batch_2_apply_label_1_2_connect_1\",\n {\"independent\": False, \"applied_labels\": [1, 2], \"connectivity\": 1},\n grid_3,\n torch.tensor(\n [\n [\n [\n [1.0, 1.0, 0.0, 1.0, 1.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [1.0, 0.0, 1.0, 0.0, 0.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n ],\n ],\n [\n [\n [1.0, 1.0, 1.0, 1.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n ],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_VALID_CASES_INVALID_CASES._ITEST_CASE_1_ITEST_CASE": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_VALID_CASES_INVALID_CASES._ITEST_CASE_1_ITEST_CASE", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_component.py", "file_name": "test_keep_largest_connected_component.py", "file_type": "text/x-python", "category": "test", "start_line": 423, "end_line": 445, "span_ids": ["impl:37"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, 
"text": "VALID_CASES = [\n TEST_CASE_1,\n TEST_CASE_2,\n TEST_CASE_3,\n TEST_CASE_4,\n TEST_CASE_5,\n TEST_CASE_6,\n TEST_CASE_7,\n TEST_CASE_8,\n TEST_CASE_9,\n TEST_CASE_10,\n TEST_CASE_11,\n TEST_CASE_12,\n TEST_CASE_13,\n TEST_CASE_14,\n TEST_CASE_15,\n]\n\nITEST_CASE_1 = [\"no_applied_labels_for_single_channel\", {\"independent\": False}, grid_1, TypeError]\n\nITEST_CASE_2 = [\"no_applied_labels_for_multi_channel\", {\"independent\": False}, grid_3, TypeError]\n\nINVALID_CASES = [ITEST_CASE_1, ITEST_CASE_2]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TestKeepLargestConnectedComponent_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_component.py_TestKeepLargestConnectedComponent_", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_component.py", "file_name": "test_keep_largest_connected_component.py", "file_type": "text/x-python", "category": "test", "start_line": 448, "end_line": 471, "span_ids": ["impl:45", "TestKeepLargestConnectedComponent", "TestKeepLargestConnectedComponent.test_raise_exception", "TestKeepLargestConnectedComponent.test_correct_results"], "tokens": 171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestKeepLargestConnectedComponent(unittest.TestCase):\n @parameterized.expand(VALID_CASES)\n def test_correct_results(self, _, args, tensor, expected):\n converter = KeepLargestConnectedComponent(**args)\n if torch.cuda.is_available():\n result = converter(tensor.clone().cuda())\n assert torch.allclose(result, expected.cuda())\n else:\n result = converter(tensor.clone())\n assert torch.allclose(result, expected)\n\n @parameterized.expand(INVALID_CASES)\n def test_raise_exception(self, _, args, tensor, expected_error):\n with self.assertRaises(expected_error):\n converter = KeepLargestConnectedComponent(**args)\n if torch.cuda.is_available():\n _ = converter(tensor.clone().cuda())\n else:\n _ = converter(tensor.clone())\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_unittest_grid_2._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_unittest_grid_2._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_componentd.py", "file_name": "test_keep_largest_connected_componentd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 24, "span_ids": ["docstring"], "tokens": 205}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import KeepLargestConnectedComponentd\n\ngrid_1 = {\n \"img\": torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [1, 2, 1, 0, 0], [1, 2, 0, 1, 0], [2, 2, 0, 0, 2]]]])\n}\ngrid_2 = {\n \"img\": torch.tensor([[[[0, 0, 0, 0, 1], [0, 0, 1, 1, 1], [1, 0, 1, 1, 2], [1, 0, 1, 2, 2], [0, 0, 0, 0, 1]]]])\n}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_grid_3_grid_3._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_grid_3_grid_3._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_componentd.py", "file_name": "test_keep_largest_connected_componentd.py", "file_type": "text/x-python", "category": "test", "start_line": 25, "end_line": 76, "span_ids": ["docstring"], "tokens": 831}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "grid_3 = {\n \"img\": torch.tensor(\n [\n [\n [\n [1.0, 1.0, 0.0, 1.0, 1.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [1.0, 0.0, 1.0, 0.0, 0.0],\n [1.0, 0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 1.0],\n ],\n ],\n [\n [\n [1.0, 1.0, 1.0, 1.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [1.0, 0.0, 1.0, 1.0, 0.0],\n [1.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n ],\n ]\n )\n}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_1_TEST_CASE_6._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_1_TEST_CASE_6._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_componentd.py", "file_name": "test_keep_largest_connected_componentd.py", "file_type": "text/x-python", 
"category": "test", "start_line": 78, "end_line": 118, "span_ids": ["impl:15", "impl:7"], "tokens": 746}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_1 = [\n \"value_1\",\n {\"keys\": [\"img\"], \"independent\": False, \"applied_labels\": 1},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [0, 2, 1, 0, 0], [0, 2, 0, 1, 0], [2, 2, 0, 0, 2]]]]),\n]\n\nTEST_CASE_2 = [\n \"value_2\",\n {\"keys\": [\"img\"], \"independent\": False, \"applied_labels\": [2]},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [1, 2, 1, 0, 0], [1, 2, 0, 1, 0], [2, 2, 0, 0, 0]]]]),\n]\n\nTEST_CASE_3 = [\n \"independent_value_1_2\",\n {\"keys\": [\"img\"], \"independent\": True, \"applied_labels\": [1, 2]},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [0, 2, 1, 0, 0], [0, 2, 0, 1, 0], [2, 2, 0, 0, 0]]]]),\n]\n\nTEST_CASE_4 = [\n \"dependent_value_1_2\",\n {\"keys\": [\"img\"], \"independent\": False, \"applied_labels\": [1, 2]},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [1, 2, 1, 0, 0], [1, 2, 0, 1, 0], [2, 2, 0, 0, 2]]]]),\n]\n\nTEST_CASE_5 = [\n \"value_1\",\n {\"keys\": [\"img\"], \"independent\": True, \"applied_labels\": [1]},\n grid_2,\n torch.tensor([[[[0, 0, 0, 0, 1], [0, 0, 1, 1, 1], [0, 0, 1, 1, 2], [0, 0, 1, 2, 2], [0, 0, 0, 0, 0]]]]),\n]\n\nTEST_CASE_6 = [\n \"independent_value_1_2\",\n {\"keys\": [\"img\"], \"independent\": True, \"applied_labels\": [1, 2]},\n grid_2,\n torch.tensor([[[[0, 0, 0, 0, 1], [0, 0, 1, 1, 1], [0, 0, 1, 1, 2], [0, 0, 1, 2, 2], [0, 0, 0, 0, 0]]]]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_7_TEST_CASE_10._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_7_TEST_CASE_10._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_componentd.py", "file_name": "test_keep_largest_connected_componentd.py", "file_type": "text/x-python", "category": "test", "start_line": 120, "end_line": 146, "span_ids": ["impl:23", "impl:15"], "tokens": 533}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_7 = [\n \"dependent_value_1_2\",\n {\"keys\": [\"img\"], \"independent\": False, \"applied_labels\": [1, 2]},\n grid_2,\n torch.tensor([[[[0, 0, 0, 0, 1], [0, 0, 1, 1, 1], [0, 0, 1, 1, 2], [0, 0, 1, 2, 2], [0, 0, 0, 0, 1]]]]),\n]\n\nTEST_CASE_8 = [\n \"value_1_connect_1\",\n {\"keys\": [\"img\"], \"independent\": False, \"applied_labels\": [1], \"connectivity\": 1},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [0, 2, 1, 0, 0], [0, 2, 0, 0, 0], [2, 2, 0, 0, 2]]]]),\n]\n\nTEST_CASE_9 = 
[\n \"independent_value_1_2_connect_1\",\n {\"keys\": [\"img\"], \"independent\": True, \"applied_labels\": [1, 2], \"connectivity\": 1},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [0, 2, 1, 0, 0], [0, 2, 0, 0, 0], [2, 2, 0, 0, 0]]]]),\n]\n\nTEST_CASE_10 = [\n \"dependent_value_1_2_connect_1\",\n {\"keys\": [\"img\"], \"independent\": False, \"applied_labels\": [1, 2], \"connectivity\": 1},\n grid_1,\n torch.tensor([[[[0, 0, 1, 0, 0], [0, 2, 1, 1, 1], [1, 2, 1, 0, 0], [1, 2, 0, 0, 0], [2, 2, 0, 0, 0]]]]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_11_TEST_CASE_11._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_11_TEST_CASE_11._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_componentd.py", "file_name": "test_keep_largest_connected_componentd.py", "file_type": "text/x-python", "category": "test", "start_line": 148, "end_line": 202, "span_ids": ["impl:23"], "tokens": 879}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_11 = [\n \"onehot_independent_batch_2_apply_label_1_connect_1\",\n {\"keys\": [\"img\"], \"independent\": True, \"applied_labels\": [1], \"connectivity\": 1},\n grid_3,\n torch.tensor(\n [\n [\n [\n [1.0, 1.0, 0.0, 1.0, 1.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 1.0],\n ],\n ],\n [\n [\n [1.0, 1.0, 1.0, 1.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n ],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_12_TEST_CASE_12._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_12_TEST_CASE_12._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_componentd.py", "file_name": 
"test_keep_largest_connected_componentd.py", "file_type": "text/x-python", "category": "test", "start_line": 204, "end_line": 258, "span_ids": ["impl:29"], "tokens": 879}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_12 = [\n \"onehot_independent_batch_2_apply_label_1_connect_2\",\n {\"keys\": [\"img\"], \"independent\": True, \"applied_labels\": [1], \"connectivity\": 2},\n grid_3,\n torch.tensor(\n [\n [\n [\n [1.0, 1.0, 0.0, 1.0, 1.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 1.0],\n ],\n ],\n [\n [\n [1.0, 1.0, 1.0, 1.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n ],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_13_TEST_CASE_13._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_13_TEST_CASE_13._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_componentd.py", "file_name": "test_keep_largest_connected_componentd.py", "file_type": "text/x-python", "category": "test", "start_line": 260, "end_line": 314, "span_ids": ["impl:31"], "tokens": 884}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_13 = [\n \"onehot_independent_batch_2_apply_label_1_2_connect_2\",\n {\"keys\": [\"img\"], \"independent\": True, \"applied_labels\": [1, 2], \"connectivity\": 2},\n grid_3,\n torch.tensor(\n [\n [\n [\n [1.0, 1.0, 0.0, 1.0, 1.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n ],\n ],\n [\n [\n [1.0, 1.0, 1.0, 1.0, 0.0],\n 
[1.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n ],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_14_TEST_CASE_14._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_14_TEST_CASE_14._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_componentd.py", "file_name": "test_keep_largest_connected_componentd.py", "file_type": "text/x-python", "category": "test", "start_line": 316, "end_line": 370, "span_ids": ["impl:33"], "tokens": 884}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_14 = [\n \"onehot_dependent_batch_2_apply_label_1_2_connect_2\",\n {\"keys\": [\"img\"], \"independent\": False, \"applied_labels\": [1, 2], \"connectivity\": 2},\n grid_3,\n torch.tensor(\n [\n [\n [\n [1.0, 1.0, 0.0, 1.0, 1.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [1.0, 0.0, 1.0, 0.0, 0.0],\n [1.0, 0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 1.0],\n ],\n ],\n [\n [\n [1.0, 1.0, 1.0, 1.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n ],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_15_TEST_CASE_15._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TEST_CASE_15_TEST_CASE_15._", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_componentd.py", "file_name": "test_keep_largest_connected_componentd.py", "file_type": "text/x-python", "category": "test", 
"start_line": 372, "end_line": 426, "span_ids": ["impl:35"], "tokens": 884}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_15 = [\n \"onehot_dependent_batch_2_apply_label_1_2_connect_1\",\n {\"keys\": [\"img\"], \"independent\": False, \"applied_labels\": [1, 2], \"connectivity\": 1},\n grid_3,\n torch.tensor(\n [\n [\n [\n [1.0, 1.0, 0.0, 1.0, 1.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [1.0, 0.0, 1.0, 0.0, 0.0],\n [1.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n ],\n ],\n [\n [\n [1.0, 1.0, 1.0, 1.0, 0.0],\n [1.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0, 0.0],\n [1.0, 1.0, 1.0, 1.0, 0.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 1.0],\n [0.0, 0.0, 1.0, 1.0, 0.0],\n [0.0, 0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n ],\n [\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0, 0.0, 1.0],\n [0.0, 0.0, 0.0, 1.0, 1.0],\n [0.0, 0.0, 0.0, 0.0, 0.0],\n ],\n ],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_VALID_CASES_INVALID_CASES._ITEST_CASE_1_ITEST_CASE": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_VALID_CASES_INVALID_CASES._ITEST_CASE_1_ITEST_CASE", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_componentd.py", "file_name": "test_keep_largest_connected_componentd.py", "file_type": "text/x-python", "category": "test", "start_line": 428, "end_line": 450, "span_ids": ["impl:37"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "VALID_CASES = [\n TEST_CASE_1,\n TEST_CASE_2,\n TEST_CASE_3,\n TEST_CASE_4,\n TEST_CASE_5,\n TEST_CASE_6,\n TEST_CASE_7,\n TEST_CASE_8,\n TEST_CASE_9,\n TEST_CASE_10,\n TEST_CASE_11,\n TEST_CASE_12,\n TEST_CASE_13,\n TEST_CASE_14,\n TEST_CASE_15,\n]\n\nITEST_CASE_1 = [\"no_applied_labels_for_single_channel\", {\"keys\": [\"img\"], \"independent\": False}, grid_1, TypeError]\n\nITEST_CASE_2 = [\"no_applied_labels_for_multi_channel\", {\"keys\": [\"img\"], \"independent\": False}, grid_3, TypeError]\n\nINVALID_CASES = [ITEST_CASE_1, ITEST_CASE_2]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TestKeepLargestConnectedComponentd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_keep_largest_connected_componentd.py_TestKeepLargestConnectedComponentd_", "embedding": null, "metadata": {"file_path": "tests/test_keep_largest_connected_componentd.py", "file_name": "test_keep_largest_connected_componentd.py", "file_type": "text/x-python", "category": "test", "start_line": 453, "end_line": 476, "span_ids": ["TestKeepLargestConnectedComponentd.test_correct_results", "TestKeepLargestConnectedComponentd.test_raise_exception", "TestKeepLargestConnectedComponentd", "impl:45"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestKeepLargestConnectedComponentd(unittest.TestCase):\n @parameterized.expand(VALID_CASES)\n def test_correct_results(self, _, args, input_dict, expected):\n converter = KeepLargestConnectedComponentd(**args)\n if torch.cuda.is_available():\n input_dict[\"img\"] = input_dict[\"img\"].cuda()\n result = converter(input_dict)\n torch.allclose(result[\"img\"], expected.cuda())\n else:\n result = converter(input_dict)\n torch.allclose(result[\"img\"], expected)\n\n @parameterized.expand(INVALID_CASES)\n def test_raise_exception(self, _, args, input_dict, expected_error):\n with self.assertRaises(expected_error):\n converter = KeepLargestConnectedComponentd(**args)\n if torch.cuda.is_available():\n input_dict[\"img\"] = input_dict[\"img\"].cuda()\n _ = converter(input_dict)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contour.py_unittest_expected_output_for_cube": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contour.py_unittest_expected_output_for_cube", "embedding": null, "metadata": {"file_path": "tests/test_label_to_contour.py", "file_name": "test_label_to_contour.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 102, "span_ids": ["docstring"], "tokens": 30}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\n\nfrom monai.transforms import LabelToContour\n\nexpected_output_for_cube =\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contour.py_gen_fixed_cube_gen_fixed_cube.return.cube_expected_output_for": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contour.py_gen_fixed_cube_gen_fixed_cube.return.cube_expected_output_for", "embedding": null, "metadata": {"file_path": "tests/test_label_to_contour.py", "file_name": "test_label_to_contour.py", "file_type": "text/x-python", "category": "test", "start_line": 103, "end_line": 113, "span_ids": ["gen_fixed_cube"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def gen_fixed_cube():\n scale, core_start, core_end = 8, 1, 7\n cube = torch.zeros(scale, scale, scale)\n cube[core_start:core_end, core_start:core_end, core_start:core_end] = torch.ones(\n core_end - core_start, core_end - core_start, core_end - core_start\n )\n cube = torch.unsqueeze(cube, 0)\n\n batch_size, channels = 10, 6\n cube = cube.repeat(batch_size, channels, 1, 1, 1)\n return cube, expected_output_for_cube", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contour.py_gen_fixed_img_gen_fixed_img.return.img_expected_output_for_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contour.py_gen_fixed_img_gen_fixed_img.return.img_expected_output_for_", "embedding": null, "metadata": {"file_path": "tests/test_label_to_contour.py", "file_name": "test_label_to_contour.py", "file_type": "text/x-python", "category": "test", "start_line": 116, "end_line": 139, "span_ids": ["gen_fixed_img"], "tokens": 300}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def gen_fixed_img():\n img = torch.tensor(\n [\n [0, 0, 0, 1, 1, 1, 1],\n [0, 0, 0, 1, 1, 1, 1],\n [0, 0, 1, 1, 1, 1, 1],\n [0, 1, 1, 1, 1, 1, 1],\n [1, 1, 1, 1, 1, 1, 1],\n ],\n dtype=torch.float32,\n )\n batch_size, channels = 10, 6\n img = img.repeat(batch_size, channels, 1, 1)\n expected_output_for_img = torch.tensor(\n [\n [0, 0, 0, 1, 1, 1, 1],\n [0, 0, 0, 1, 0, 0, 1],\n [0, 0, 1, 1, 0, 0, 1],\n [0, 1, 1, 0, 0, 0, 1],\n [1, 1, 1, 1, 1, 1, 1],\n ],\n dtype=torch.float32,\n )\n return img, expected_output_for_img", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contour.py_TestContour_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contour.py_TestContour_", "embedding": null, "metadata": {"file_path": "tests/test_label_to_contour.py", "file_name": "test_label_to_contour.py", "file_type": "text/x-python", "category": "test", "start_line": 144, "end_line": 179, "span_ids": ["impl:3", "TestContour", "TestContour.test_contour"], "tokens": 347}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestContour(unittest.TestCase):\n def test_contour(self):\n input_param = {\"kernel_type\": \"Laplace\"}\n\n # check 5-dim input data\n test_cube, expected_output = gen_fixed_cube()\n test_result_cube = LabelToContour(**input_param)(test_cube)\n self.assertEqual(test_result_cube.shape, test_cube.shape)\n\n test_result_np = test_result_cube.data.cpu().numpy()\n batch_size, channels = test_cube.shape[0], test_cube.shape[1]\n for batch in range(batch_size):\n for channel in range(channels):\n np.testing.assert_allclose(test_result_np[batch, channel, ...], expected_output)\n\n # check 4-dim input data\n test_img, expected_output = gen_fixed_img()\n batch_size, channels = test_img.shape[0], test_img.shape[1]\n test_result_img = LabelToContour(**input_param)(test_img)\n self.assertEqual(test_result_img.shape, test_img.shape)\n\n test_result_np = test_result_img.data.cpu().numpy()\n for batch in range(batch_size):\n for channel in range(channels):\n np.testing.assert_allclose(test_result_img[batch, channel, ...], expected_output)\n\n # check invalid input data\n error_input = torch.rand(1, 2, 3)\n self.assertRaises(ValueError, LabelToContour(**input_param), error_input)\n error_input = torch.rand(1, 2, 3, 4, 5, 6)\n self.assertRaises(ValueError, LabelToContour(**input_param), error_input)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contourd.py_unittest_expected_output_for_cube": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contourd.py_unittest_expected_output_for_cube", "embedding": null, "metadata": {"file_path": "tests/test_label_to_contourd.py", "file_name": "test_label_to_contourd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 102, "span_ids": ["docstring"], "tokens": 30}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\n\nfrom monai.transforms import LabelToContourd\n\nexpected_output_for_cube =\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contourd.py_gen_fixed_cube_gen_fixed_cube.return.cube_expected_output_for": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contourd.py_gen_fixed_cube_gen_fixed_cube.return.cube_expected_output_for", "embedding": null, "metadata": {"file_path": "tests/test_label_to_contourd.py", "file_name": "test_label_to_contourd.py", "file_type": "text/x-python", "category": "test", "start_line": 103, "end_line": 113, "span_ids": ["gen_fixed_cube"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def gen_fixed_cube():\n scale, core_start, core_end = 8, 1, 7\n cube = torch.zeros(scale, scale, scale)\n cube[core_start:core_end, core_start:core_end, core_start:core_end] = torch.ones(\n core_end - core_start, core_end - core_start, core_end - core_start\n )\n cube = torch.unsqueeze(cube, 0)\n\n batch_size, channels = 10, 6\n cube = cube.repeat(batch_size, channels, 1, 1, 1)\n return cube, expected_output_for_cube", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contourd.py_gen_fixed_img_gen_fixed_img.return.img_expected_output_for_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contourd.py_gen_fixed_img_gen_fixed_img.return.img_expected_output_for_", "embedding": null, "metadata": {"file_path": "tests/test_label_to_contourd.py", "file_name": "test_label_to_contourd.py", "file_type": "text/x-python", "category": "test", "start_line": 116, "end_line": 139, "span_ids": ["gen_fixed_img"], "tokens": 300}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def gen_fixed_img():\n img = torch.tensor(\n [\n [0, 0, 0, 1, 1, 1, 1],\n [0, 0, 0, 1, 1, 1, 1],\n [0, 0, 1, 1, 1, 1, 1],\n [0, 1, 1, 1, 1, 1, 1],\n [1, 1, 1, 1, 1, 1, 1],\n ],\n dtype=torch.float32,\n )\n batch_size, channels = 10, 6\n img = img.repeat(batch_size, channels, 1, 1)\n expected_output_for_img = torch.tensor(\n [\n [0, 0, 0, 1, 1, 1, 1],\n [0, 0, 0, 1, 0, 0, 1],\n [0, 0, 1, 1, 0, 0, 1],\n [0, 1, 1, 0, 0, 0, 1],\n [1, 1, 1, 1, 1, 1, 1],\n ],\n dtype=torch.float32,\n )\n return img, expected_output_for_img", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contourd.py_TestContourd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_contourd.py_TestContourd_", "embedding": null, "metadata": {"file_path": "tests/test_label_to_contourd.py", "file_name": "test_label_to_contourd.py", "file_type": "text/x-python", "category": "test", "start_line": 144, "end_line": 179, "span_ids": ["impl:3", "TestContourd", "TestContourd.test_contour"], "tokens": 379}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestContourd(unittest.TestCase):\n def test_contour(self):\n input_param = {\"keys\": \"img\", \"kernel_type\": \"Laplace\"}\n\n # check 5-dim input data\n test_cube, expected_output = gen_fixed_cube()\n test_result_cube = LabelToContourd(**input_param)({\"img\": test_cube})\n self.assertEqual(test_result_cube[\"img\"].shape, test_cube.shape)\n\n test_result_np = test_result_cube[\"img\"].data.cpu().numpy()\n batch_size, channels = test_cube.shape[0], test_cube.shape[1]\n for batch in range(batch_size):\n for channel in range(channels):\n np.testing.assert_allclose(test_result_np[batch, channel, ...], expected_output)\n\n # check 4-dim input data\n test_img, expected_output = gen_fixed_img()\n batch_size, channels = test_img.shape[0], test_img.shape[1]\n test_result_img = LabelToContourd(**input_param)({\"img\": test_img})\n self.assertEqual(test_result_img[\"img\"].shape, test_img.shape)\n\n test_result_np = test_result_img[\"img\"].data.cpu().numpy()\n for batch in range(batch_size):\n for channel in range(channels):\n np.testing.assert_allclose(test_result_img[\"img\"][batch, channel, ...], expected_output)\n\n # check invalid input data\n error_input = {\"img\": torch.rand(1, 2, 3)}\n self.assertRaises(ValueError, LabelToContourd(**input_param), error_input)\n error_input = {\"img\": torch.rand(1, 2, 3, 4, 5, 6)}\n self.assertRaises(ValueError, LabelToContourd(**input_param), error_input)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_mask.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_mask.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_label_to_mask.py", "file_name": "test_label_to_mask.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 59, "span_ids": ["TestLabelToMask.test_value", "impl:11", "impl:7", "docstring", "TestLabelToMask"], "tokens": 726}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import 
unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import LabelToMask\n\nTEST_CASE_1 = [\n {\"select_labels\": [2, 3], \"merge_channels\": False},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array([[[0, 0, 0], [1, 1, 1], [1, 1, 1], [0, 0, 0], [0, 0, 0], [0, 0, 0]]]),\n]\n\nTEST_CASE_2 = [\n {\"select_labels\": 2, \"merge_channels\": False},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array([[[0, 0, 0], [1, 1, 1], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]]]),\n]\n\nTEST_CASE_3 = [\n {\"select_labels\": [1, 2], \"merge_channels\": False},\n np.array([[[0, 0, 1], [0, 1, 0]], [[1, 0, 0], [0, 1, 1]], [[1, 0, 1], [1, 1, 0]]]),\n np.array([[[1, 0, 0], [0, 1, 1]], [[1, 0, 1], [1, 1, 0]]]),\n]\n\nTEST_CASE_4 = [\n {\"select_labels\": 2, \"merge_channels\": False},\n np.array([[[0, 0, 1], [0, 1, 0]], [[1, 0, 0], [0, 1, 1]], [[1, 0, 1], [1, 1, 0]]]),\n np.array([[[1, 0, 1], [1, 1, 0]]]),\n]\n\nTEST_CASE_5 = [\n {\"select_labels\": [1, 2], \"merge_channels\": True},\n np.array([[[0, 0, 1], [0, 1, 0]], [[1, 0, 0], [0, 1, 1]], [[1, 0, 1], [1, 1, 0]]]),\n np.array([[[1, 0, 1], [1, 1, 1]]]),\n]\n\n\nclass TestLabelToMask(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5])\n def test_value(self, argments, image, expected_data):\n result = LabelToMask(**argments)(image)\n np.testing.assert_allclose(result, expected_data)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_maskd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_label_to_maskd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_label_to_maskd.py", "file_name": "test_label_to_maskd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 59, "span_ids": ["impl:11", "TestLabelToMaskd", "impl:7", "TestLabelToMaskd.test_value", "docstring"], "tokens": 781}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import LabelToMaskd\n\nTEST_CASE_1 = [\n {\"keys\": \"img\", \"select_labels\": [2, 3], \"merge_channels\": False},\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array([[[0, 0, 0], [1, 1, 1], [1, 1, 1], [0, 0, 0], [0, 0, 0], [0, 0, 0]]]),\n]\n\nTEST_CASE_2 = [\n {\"keys\": \"img\", \"select_labels\": 2, \"merge_channels\": False},\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array([[[0, 0, 0], [1, 1, 1], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]]]),\n]\n\nTEST_CASE_3 = [\n {\"keys\": \"img\", \"select_labels\": [1, 2], \"merge_channels\": False},\n {\"img\": np.array([[[0, 0, 1], [0, 1, 0]], [[1, 0, 0], [0, 1, 1]], [[1, 0, 1], 
[1, 1, 0]]])},\n np.array([[[1, 0, 0], [0, 1, 1]], [[1, 0, 1], [1, 1, 0]]]),\n]\n\nTEST_CASE_4 = [\n {\"keys\": \"img\", \"select_labels\": 2, \"merge_channels\": False},\n {\"img\": np.array([[[0, 0, 1], [0, 1, 0]], [[1, 0, 0], [0, 1, 1]], [[1, 0, 1], [1, 1, 0]]])},\n np.array([[[1, 0, 1], [1, 1, 0]]]),\n]\n\nTEST_CASE_5 = [\n {\"keys\": \"img\", \"select_labels\": [1, 2], \"merge_channels\": True},\n {\"img\": np.array([[[0, 0, 1], [0, 1, 0]], [[1, 0, 0], [0, 1, 1]], [[1, 0, 1], [1, 1, 0]]])},\n np.array([[[1, 0, 1], [1, 1, 1]]]),\n]\n\n\nclass TestLabelToMaskd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5])\n def test_value(self, argments, image, expected_data):\n result = LabelToMaskd(**argments)(image)\n np.testing.assert_allclose(result[\"img\"], expected_data)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lambda.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lambda.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_lambda.py", "file_name": "test_lambda.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 42, "span_ids": ["impl", "TestLambda.test_lambda_identity", "docstring", "TestLambda", "TestLambda.test_lambda_slicing"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms.utility.array import Lambda\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestLambda(NumpyImageTestCase2D):\n def test_lambda_identity(self):\n img = self.imt\n\n def identity_func(x):\n return x\n\n lambd = Lambda(func=identity_func)\n self.assertTrue(np.allclose(identity_func(img), lambd(img)))\n\n def test_lambda_slicing(self):\n img = self.imt\n\n def slice_func(x):\n return x[:, :, :6, ::-2]\n\n lambd = Lambda(func=slice_func)\n self.assertTrue(np.allclose(slice_func(img), lambd(img)))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lambdad.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lambdad.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_lambdad.py", "file_name": "test_lambdad.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 49, "span_ids": ["impl", "TestLambdad", "docstring", "TestLambdad.test_lambdad_identity", "TestLambdad.test_lambdad_slicing"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms.utility.dictionary import Lambdad\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestLambdad(NumpyImageTestCase2D):\n def test_lambdad_identity(self):\n img = self.imt\n data = {\"img\": img, \"prop\": 1.0}\n\n def noise_func(x):\n return x + 1.0\n\n expected = {\"img\": noise_func(data[\"img\"]), \"prop\": 1.0}\n ret = Lambdad(keys=[\"img\", \"prop\"], func=noise_func, overwrite=[True, False])(data)\n self.assertTrue(np.allclose(expected[\"img\"], ret[\"img\"]))\n self.assertTrue(np.allclose(expected[\"prop\"], ret[\"prop\"]))\n\n def test_lambdad_slicing(self):\n img = self.imt\n data = {}\n data[\"img\"] = img\n\n def slice_func(x):\n return x[:, :, :6, ::-2]\n\n lambd = Lambdad(keys=data.keys(), func=slice_func)\n expected = {}\n expected[\"img\"] = slice_func(data[\"img\"])\n self.assertTrue(np.allclose(expected[\"img\"], lambd(data)[\"img\"]))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_list_data_collate.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_list_data_collate.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_list_data_collate.py", "file_name": "test_list_data_collate.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 47, "span_ids": ["impl:21", "TestListDataCollate.test_type_shape", "TestListDataCollate", "docstring"], "tokens": 426}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.data import list_data_collate\n\na = {\"image\": np.array([1, 2, 3]), \"label\": np.array([4, 5, 6])}\nb = {\"image\": np.array([7, 8, 9]), \"label\": np.array([10, 11, 12])}\nc = {\"image\": np.array([13, 14, 15]), \"label\": np.array([16, 7, 18])}\nd = {\"image\": np.array([19, 20, 21]), \"label\": np.array([22, 23, 24])}\nTEST_CASE_1 = [[[a, b], [c, d]], dict, torch.Size([4, 3])] # dataset returns a list of dictionary data\n\ne = (np.array([1, 2, 3]), np.array([4, 5, 6]))\nf = (np.array([7, 8, 9]), np.array([10, 11, 12]))\ng = (np.array([13, 14, 15]), np.array([16, 7, 18]))\nh = (np.array([19, 20, 21]), np.array([22, 23, 24]))\nTEST_CASE_2 = [[[e, f], [g, h]], list, torch.Size([4, 3])] # dataset returns a list of tuple data\n\n\nclass TestListDataCollate(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2])\n def test_type_shape(self, input_data, expected_type, expected_shape):\n result = list_data_collate(input_data)\n self.assertIsInstance(result, expected_type)\n if isinstance(result, dict):\n data = result[\"image\"]\n 
else:\n data = result[0]\n self.assertEqual(data.shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_os_TestLoadSpacingOrientation.test_load_spacingd.None_5": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_os_TestLoadSpacingOrientation.test_load_spacingd.None_5", "embedding": null, "metadata": {"file_path": "tests/test_load_spacing_orientation.py", "file_name": "test_load_spacing_orientation.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 46, "span_ids": ["TestLoadSpacingOrientation.test_load_spacingd", "TestLoadSpacingOrientation", "docstring"], "tokens": 366}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport time\nimport unittest\n\nimport nibabel\nimport numpy as np\nfrom nibabel.processing import resample_to_output\nfrom parameterized import parameterized\n\nfrom monai.transforms import AddChanneld, LoadImaged, Orientationd, Spacingd\n\nFILES = tuple(\n os.path.join(os.path.dirname(__file__), \"testing_data\", filename)\n for filename in (\"anatomical.nii\", \"reoriented_anat_moved.nii\")\n)\n\n\nclass TestLoadSpacingOrientation(unittest.TestCase):\n @parameterized.expand(FILES)\n def test_load_spacingd(self, filename):\n data = {\"image\": filename}\n data_dict = LoadImaged(keys=\"image\")(data)\n data_dict = AddChanneld(keys=\"image\")(data_dict)\n t = time.time()\n res_dict = Spacingd(keys=\"image\", pixdim=(1, 0.2, 1), diagonal=True, padding_mode=\"zeros\")(data_dict)\n t1 = time.time()\n print(f\"time monai: {t1 - t}\")\n anat = nibabel.Nifti1Image(data_dict[\"image\"][0], data_dict[\"image_meta_dict\"][\"original_affine\"])\n ref = resample_to_output(anat, (1, 0.2, 1), order=1)\n t2 = time.time()\n print(f\"time scipy: {t2 - t1}\")\n self.assertTrue(t2 >= t1)\n np.testing.assert_allclose(res_dict[\"image_meta_dict\"][\"affine\"], ref.affine)\n np.testing.assert_allclose(res_dict[\"image\"].shape[1:], ref.shape)\n np.testing.assert_allclose(ref.get_fdata(), res_dict[\"image\"][0], atol=0.05)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_rotate_TestLoadSpacingOrientation.test_load_spacingd_rotate.if_anatomical_not_in_fi.else_.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_rotate_TestLoadSpacingOrientation.test_load_spacingd_rotate.if_anatomical_not_in_fi.else_.np_testing_assert_allclos", 
"embedding": null, "metadata": {"file_path": "tests/test_load_spacing_orientation.py", "file_name": "test_load_spacing_orientation.py", "file_type": "text/x-python", "category": "test", "start_line": 48, "end_line": 73, "span_ids": ["TestLoadSpacingOrientation.test_load_spacingd_rotate"], "tokens": 421}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadSpacingOrientation(unittest.TestCase):\n\n @parameterized.expand(FILES)\n def test_load_spacingd_rotate(self, filename):\n data = {\"image\": filename}\n data_dict = LoadImaged(keys=\"image\")(data)\n data_dict = AddChanneld(keys=\"image\")(data_dict)\n affine = data_dict[\"image_meta_dict\"][\"affine\"]\n data_dict[\"image_meta_dict\"][\"original_affine\"] = data_dict[\"image_meta_dict\"][\"affine\"] = (\n np.array([[0, 0, 1, 0], [0, 1, 0, 0], [-1, 0, 0, 0], [0, 0, 0, 1]]) @ affine\n )\n t = time.time()\n res_dict = Spacingd(keys=\"image\", pixdim=(1, 2, 3), diagonal=True, padding_mode=\"zeros\")(data_dict)\n t1 = time.time()\n print(f\"time monai: {t1 - t}\")\n anat = nibabel.Nifti1Image(data_dict[\"image\"][0], data_dict[\"image_meta_dict\"][\"original_affine\"])\n ref = resample_to_output(anat, (1, 2, 3), order=1)\n t2 = time.time()\n print(f\"time scipy: {t2 - t1}\")\n self.assertTrue(t2 >= t1)\n np.testing.assert_allclose(res_dict[\"image_meta_dict\"][\"affine\"], ref.affine)\n if \"anatomical\" not in filename:\n np.testing.assert_allclose(res_dict[\"image\"].shape[1:], ref.shape)\n np.testing.assert_allclose(ref.get_fdata(), res_dict[\"image\"][0], atol=0.05)\n else:\n # different from the ref implementation (shape computed by round\n # instead of ceil)\n np.testing.assert_allclose(ref.get_fdata()[..., :-1], res_dict[\"image\"][0], atol=0.05)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_non_diag_TestLoadSpacingOrientation.test_load_spacingd_non_diag.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_non_diag_TestLoadSpacingOrientation.test_load_spacingd_non_diag.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_load_spacing_orientation.py", "file_name": "test_load_spacing_orientation.py", "file_type": "text/x-python", "category": "test", "start_line": 75, "end_line": 94, "span_ids": ["TestLoadSpacingOrientation.test_load_spacingd_non_diag"], "tokens": 300}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadSpacingOrientation(unittest.TestCase):\n\n def test_load_spacingd_non_diag(self):\n data = {\"image\": 
FILES[1]}\n data_dict = LoadImaged(keys=\"image\")(data)\n data_dict = AddChanneld(keys=\"image\")(data_dict)\n affine = data_dict[\"image_meta_dict\"][\"affine\"]\n data_dict[\"image_meta_dict\"][\"original_affine\"] = data_dict[\"image_meta_dict\"][\"affine\"] = (\n np.array([[0, 0, 1, 0], [0, 1, 0, 0], [-1, 0, 0, 0], [0, 0, 0, 1]]) @ affine\n )\n res_dict = Spacingd(keys=\"image\", pixdim=(1, 2, 3), diagonal=False, padding_mode=\"zeros\")(data_dict)\n np.testing.assert_allclose(\n res_dict[\"image_meta_dict\"][\"affine\"],\n np.array(\n [\n [0.0, 0.0, 3.0, -27.599409],\n [0.0, 2.0, 0.0, -47.977585],\n [-1.0, 0.0, 0.0, 35.297897],\n [0.0, 0.0, 0.0, 1.0],\n ]\n ),\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag_TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag_TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_load_spacing_orientation.py", "file_name": "test_load_spacing_orientation.py", "file_type": "text/x-python", "category": "test", "start_line": 96, "end_line": 104, "span_ids": ["TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadSpacingOrientation(unittest.TestCase):\n\n def test_load_spacingd_rotate_non_diag(self):\n data = {\"image\": FILES[0]}\n data_dict = LoadImaged(keys=\"image\")(data)\n data_dict = AddChanneld(keys=\"image\")(data_dict)\n res_dict = Spacingd(keys=\"image\", pixdim=(1, 2, 3), diagonal=False, padding_mode=\"border\")(data_dict)\n np.testing.assert_allclose(\n res_dict[\"image_meta_dict\"][\"affine\"],\n np.array([[-1.0, 0.0, 0.0, 32.0], [0.0, 2.0, 0.0, -40.0], [0.0, 0.0, 3.0, -16.0], [0.0, 0.0, 0.0, 1.0]]),\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag_ornt_TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag_ornt.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag_ornt_TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag_ornt.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_load_spacing_orientation.py", 
"file_name": "test_load_spacing_orientation.py", "file_type": "text/x-python", "category": "test", "start_line": 106, "end_line": 115, "span_ids": ["TestLoadSpacingOrientation.test_load_spacingd_rotate_non_diag_ornt"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadSpacingOrientation(unittest.TestCase):\n\n def test_load_spacingd_rotate_non_diag_ornt(self):\n data = {\"image\": FILES[0]}\n data_dict = LoadImaged(keys=\"image\")(data)\n data_dict = AddChanneld(keys=\"image\")(data_dict)\n res_dict = Spacingd(keys=\"image\", pixdim=(1, 2, 3), diagonal=False, padding_mode=\"border\")(data_dict)\n res_dict = Orientationd(keys=\"image\", axcodes=\"LPI\")(res_dict)\n np.testing.assert_allclose(\n res_dict[\"image_meta_dict\"][\"affine\"],\n np.array([[-1.0, 0.0, 0.0, 32.0], [0.0, -2.0, 0.0, 40.0], [0.0, 0.0, -3.0, 32.0], [0.0, 0.0, 0.0, 1.0]]),\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_non_diag_ornt_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_spacing_orientation.py_TestLoadSpacingOrientation.test_load_spacingd_non_diag_ornt_", "embedding": null, "metadata": {"file_path": "tests/test_load_spacing_orientation.py", "file_name": "test_load_spacing_orientation.py", "file_type": "text/x-python", "category": "test", "start_line": 117, "end_line": 142, "span_ids": ["impl:3", "TestLoadSpacingOrientation.test_load_spacingd_non_diag_ornt"], "tokens": 336}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadSpacingOrientation(unittest.TestCase):\n\n def test_load_spacingd_non_diag_ornt(self):\n data = {\"image\": FILES[1]}\n data_dict = LoadImaged(keys=\"image\")(data)\n data_dict = AddChanneld(keys=\"image\")(data_dict)\n affine = data_dict[\"image_meta_dict\"][\"affine\"]\n data_dict[\"image_meta_dict\"][\"original_affine\"] = data_dict[\"image_meta_dict\"][\"affine\"] = (\n np.array([[0, 0, 1, 0], [0, 1, 0, 0], [-1, 0, 0, 0], [0, 0, 0, 1]]) @ affine\n )\n res_dict = Spacingd(keys=\"image\", pixdim=(1, 2, 3), diagonal=False, padding_mode=\"border\")(data_dict)\n res_dict = Orientationd(keys=\"image\", axcodes=\"LPI\")(res_dict)\n np.testing.assert_allclose(\n res_dict[\"image_meta_dict\"][\"affine\"],\n np.array(\n [\n [-3.0, 0.0, 0.0, 56.4005909],\n [0.0, -2.0, 0.0, 52.02241516],\n [0.0, 0.0, -1.0, 35.29789734],\n [0.0, 0.0, 0.0, 1.0],\n ]\n ),\n )\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": 
"TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_map_transform.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_map_transform.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_map_transform.py", "file_name": "test_map_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 42, "span_ids": ["TestRandomizable.test_wrong_keys", "MapTest", "MapTest.__call__", "TestRandomizable.test_keys", "impl:5", "docstring", "TestRandomizable"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nfrom parameterized import parameterized\n\nfrom monai.transforms import MapTransform\n\nTEST_CASES = [[\"item\", (\"item\",)], [None, (None,)], [[\"item1\", \"item2\"], (\"item1\", \"item2\")]]\n\nTEST_ILL_CASES = [[ValueError, []], [ValueError, ()], [TypeError, [[]]]]\n\n\nclass MapTest(MapTransform):\n def __call__(self, data):\n pass\n\n\nclass TestRandomizable(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_keys(self, keys, expected):\n transform = MapTest(keys=keys)\n self.assertEqual(transform.keys, expected)\n\n @parameterized.expand(TEST_ILL_CASES)\n def test_wrong_keys(self, exception, keys):\n with self.assertRaisesRegex(exception, \"\"):\n MapTest(keys=keys)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mask_intensity.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mask_intensity.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_mask_intensity.py", "file_name": "test_mask_intensity.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 47, "span_ids": ["TestMaskIntensity.test_value", "TestMaskIntensity", "impl:7", "docstring"], "tokens": 612}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import MaskIntensity\n\nTEST_CASE_1 = [\n {\"mask_data\": np.array([[[0, 0, 0], [0, 1, 0], [0, 0, 0]]])},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array([[[0, 0, 0], [0, 2, 0], [0, 0, 0]], [[0, 0, 0], [0, 5, 0], [0, 0, 0]]]),\n]\n\nTEST_CASE_2 = [\n {\"mask_data\": np.array([[[0, 0, 0], [0, 5, 0], [0, 0, 0]]])},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array([[[0, 0, 0], [0, 2, 0], [0, 0, 0]], [[0, 0, 0], [0, 5, 0], [0, 0, 
0]]]),\n]\n\nTEST_CASE_3 = [\n {\"mask_data\": np.array([[[0, 0, 0], [0, 1, 0], [0, 0, 0]], [[0, 1, 0], [0, 1, 0], [0, 1, 0]]])},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array([[[0, 0, 0], [0, 2, 0], [0, 0, 0]], [[0, 4, 0], [0, 5, 0], [0, 6, 0]]]),\n]\n\n\nclass TestMaskIntensity(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_value(self, argments, image, expected_data):\n result = MaskIntensity(**argments)(image)\n np.testing.assert_allclose(result, expected_data)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_dice_loss.py_unittest_TEST_CASES": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_dice_loss.py_unittest_TEST_CASES", "embedding": null, "metadata": {"file_path": "tests/test_masked_dice_loss.py", "file_name": "test_masked_dice_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 115, "span_ids": ["docstring"], "tokens": 37}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.losses import MaskedDiceLoss\n\nTEST_CASES =\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_dice_loss.py_TestDiceLoss_TestDiceLoss.test_ill_shape.with_self_assertRaisesReg.loss_forward_torch_ones_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_dice_loss.py_TestDiceLoss_TestDiceLoss.test_ill_shape.with_self_assertRaisesReg.loss_forward_torch_ones_", "embedding": null, "metadata": {"file_path": "tests/test_masked_dice_loss.py", "file_name": "test_masked_dice_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 118, "end_line": 127, "span_ids": ["TestDiceLoss.test_shape", "TestDiceLoss.test_ill_shape", "TestDiceLoss"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDiceLoss(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_shape(self, input_param, input_data, expected_val):\n result = MaskedDiceLoss(**input_param).forward(**input_data)\n np.testing.assert_allclose(result.detach().cpu().numpy(), expected_val, rtol=1e-5)\n\n def test_ill_shape(self):\n loss = MaskedDiceLoss()\n with self.assertRaisesRegex(AssertionError, \"\"):\n loss.forward(torch.ones((1, 2, 3)), torch.ones((4, 5, 6)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_dice_loss.py_TestDiceLoss.test_ill_opts_TestDiceLoss.test_ill_opts.None_2.MaskedDiceLoss_reduction_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_dice_loss.py_TestDiceLoss.test_ill_opts_TestDiceLoss.test_ill_opts.None_2.MaskedDiceLoss_reduction_", "embedding": null, "metadata": {"file_path": "tests/test_masked_dice_loss.py", "file_name": "test_masked_dice_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 129, "end_line": 137, "span_ids": ["TestDiceLoss.test_ill_opts"], "tokens": 119}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDiceLoss(unittest.TestCase):\n\n def test_ill_opts(self):\n with self.assertRaisesRegex(ValueError, \"\"):\n MaskedDiceLoss(sigmoid=True, softmax=True)\n chn_input = torch.ones((1, 1, 3))\n chn_target = torch.ones((1, 1, 3))\n with self.assertRaisesRegex(ValueError, \"\"):\n MaskedDiceLoss(reduction=\"unknown\")(chn_input, chn_target)\n with self.assertRaisesRegex(ValueError, \"\"):\n MaskedDiceLoss(reduction=None)(chn_input, chn_target)", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_dice_loss.py_TestDiceLoss.test_input_warnings_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_masked_dice_loss.py_TestDiceLoss.test_input_warnings_", "embedding": null, "metadata": {"file_path": "tests/test_masked_dice_loss.py", "file_name": "test_masked_dice_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 139, "end_line": 155, "span_ids": ["impl:3", "TestDiceLoss.test_input_warnings"], "tokens": 153}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestDiceLoss(unittest.TestCase):\n\n def test_input_warnings(self):\n chn_input = torch.ones((1, 1, 3))\n chn_target = torch.ones((1, 1, 3))\n with self.assertWarns(Warning):\n loss = MaskedDiceLoss(include_background=False)\n loss.forward(chn_input, chn_target)\n with self.assertWarns(Warning):\n loss = MaskedDiceLoss(softmax=True)\n loss.forward(chn_input, chn_target)\n with self.assertWarns(Warning):\n loss = MaskedDiceLoss(to_onehot_y=True)\n loss.forward(chn_input, chn_target)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mean_ensemble.py_unittest_TEST_CASE_6._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mean_ensemble.py_unittest_TEST_CASE_6._", "embedding": null, "metadata": {"file_path": "tests/test_mean_ensemble.py", "file_name": "test_mean_ensemble.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 54, "span_ids": ["impl:11", "docstring"], "tokens": 538}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import MeanEnsemble\n\nTEST_CASE_1 = [\n {\"weights\": None},\n [torch.ones(2, 2, 2, 2), torch.ones(2, 2, 2, 2) + 2],\n torch.ones(2, 2, 2, 2) + 1,\n]\n\nTEST_CASE_2 = [\n {\"weights\": None},\n torch.stack([torch.ones(2, 2, 2, 2), torch.ones(2, 2, 2, 2) + 2]),\n torch.ones(2, 2, 2, 2) + 1,\n]\n\nTEST_CASE_3 = [\n {\"weights\": [1, 3]},\n [torch.ones(2, 2, 2, 2), torch.ones(2, 2, 2, 2) + 2],\n torch.ones(2, 2, 2, 2) * 2.5,\n]\n\nTEST_CASE_4 = [\n {\"weights\": [[[1, 3]], [[3, 1]]]},\n [torch.ones(2, 2, 2, 2), torch.ones(2, 2, 2, 2) + 2],\n torch.ones(2, 2, 2, 2) * torch.tensor([2.5, 1.5]).reshape(1, 2, 1, 1),\n]\n\nTEST_CASE_5 = [\n {\"weights\": np.array([[[1, 3]], [[3, 1]]])},\n 
[torch.ones(2, 2, 2, 2), torch.ones(2, 2, 2, 2) + 2],\n torch.ones(2, 2, 2, 2) * torch.tensor([2.5, 1.5]).reshape(1, 2, 1, 1),\n]\n\nTEST_CASE_6 = [\n {\"weights\": torch.tensor([[[1, 3]], [[3, 1]]])},\n [torch.ones(2, 2, 2, 2), torch.ones(2, 2, 2, 2) + 2],\n torch.ones(2, 2, 2, 2) * torch.tensor([2.5, 1.5]).reshape(1, 2, 1, 1),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mean_ensemble.py_TestMeanEnsemble_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mean_ensemble.py_TestMeanEnsemble_", "embedding": null, "metadata": {"file_path": "tests/test_mean_ensemble.py", "file_name": "test_mean_ensemble.py", "file_type": "text/x-python", "category": "test", "start_line": 55, "end_line": 73, "span_ids": ["TestMeanEnsemble.test_cuda_value", "TestMeanEnsemble", "TestMeanEnsemble.test_value", "impl:13"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMeanEnsemble(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5, TEST_CASE_6])\n def test_value(self, input_param, img, expected_value):\n result = MeanEnsemble(**input_param)(img)\n torch.testing.assert_allclose(result, expected_value)\n\n def test_cuda_value(self):\n img = torch.stack([torch.ones(2, 2, 2, 2), torch.ones(2, 2, 2, 2) + 2])\n expected_value = torch.ones(2, 2, 2, 2) * torch.tensor([2.5, 1.5]).reshape(1, 2, 1, 1)\n if torch.cuda.is_available():\n img = img.to(torch.device(\"cuda:0\"))\n expected_value = expected_value.to(torch.device(\"cuda:0\"))\n result = MeanEnsemble(torch.tensor([[[1, 3]], [[3, 1]]]))(img)\n torch.testing.assert_allclose(result, expected_value)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mean_ensembled.py_unittest_TEST_CASE_6._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mean_ensembled.py_unittest_TEST_CASE_6._", "embedding": null, "metadata": {"file_path": "tests/test_mean_ensembled.py", "file_name": "test_mean_ensembled.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 54, "span_ids": ["impl:9", "docstring"], "tokens": 673}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import 
MeanEnsembled\n\nTEST_CASE_1 = [\n {\"keys\": [\"pred0\", \"pred1\"], \"output_key\": \"output\", \"weights\": None},\n {\"pred0\": torch.ones(2, 2, 2, 2), \"pred1\": torch.ones(2, 2, 2, 2) + 2},\n torch.ones(2, 2, 2, 2) + 1,\n]\n\nTEST_CASE_2 = [\n {\"keys\": \"output\", \"weights\": None},\n {\"output\": torch.stack([torch.ones(2, 2, 2, 2), torch.ones(2, 2, 2, 2) + 2])},\n torch.ones(2, 2, 2, 2) + 1,\n]\n\nTEST_CASE_3 = [\n {\"keys\": [\"pred0\", \"pred1\"], \"output_key\": \"output\", \"weights\": [1, 3]},\n {\"pred0\": torch.ones(2, 2, 2, 2), \"pred1\": torch.ones(2, 2, 2, 2) + 2},\n torch.ones(2, 2, 2, 2) * 2.5,\n]\n\nTEST_CASE_4 = [\n {\"keys\": [\"pred0\", \"pred1\"], \"output_key\": \"output\", \"weights\": [[[1, 3]], [[3, 1]]]},\n {\"pred0\": torch.ones(2, 2, 2, 2), \"pred1\": torch.ones(2, 2, 2, 2) + 2},\n torch.ones(2, 2, 2, 2) * torch.tensor([2.5, 1.5]).reshape(1, 2, 1, 1),\n]\n\nTEST_CASE_5 = [\n {\"keys\": [\"pred0\", \"pred1\"], \"output_key\": \"output\", \"weights\": np.array([[[1, 3]], [[3, 1]]])},\n {\"pred0\": torch.ones(2, 2, 2, 2), \"pred1\": torch.ones(2, 2, 2, 2) + 2},\n torch.ones(2, 2, 2, 2) * torch.tensor([2.5, 1.5]).reshape(1, 2, 1, 1),\n]\n\nTEST_CASE_6 = [\n {\"keys\": [\"pred0\", \"pred1\"], \"output_key\": \"output\", \"weights\": torch.tensor([[[1, 3]], [[3, 1]]])},\n {\"pred0\": torch.ones(2, 2, 2, 2), \"pred1\": torch.ones(2, 2, 2, 2) + 2},\n torch.ones(2, 2, 2, 2) * torch.tensor([2.5, 1.5]).reshape(1, 2, 1, 1),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mean_ensembled.py_TestMeanEnsembled_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mean_ensembled.py_TestMeanEnsembled_", "embedding": null, "metadata": {"file_path": "tests/test_mean_ensembled.py", "file_name": "test_mean_ensembled.py", "file_type": "text/x-python", "category": "test", "start_line": 55, "end_line": 73, "span_ids": ["TestMeanEnsembled.test_value", "impl:13", "TestMeanEnsembled", "TestMeanEnsembled.test_cuda_value"], "tokens": 262}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestMeanEnsembled(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5, TEST_CASE_6])\n def test_value(self, input_param, data, expected_value):\n result = MeanEnsembled(**input_param)(data)\n torch.testing.assert_allclose(result[\"output\"], expected_value)\n\n def test_cuda_value(self):\n img = torch.stack([torch.ones(2, 2, 2, 2), torch.ones(2, 2, 2, 2) + 2])\n expected_value = torch.ones(2, 2, 2, 2) * torch.tensor([2.5, 1.5]).reshape(1, 2, 1, 1)\n if torch.cuda.is_available():\n img = img.to(torch.device(\"cuda:0\"))\n expected_value = expected_value.to(torch.device(\"cuda:0\"))\n result = MeanEnsembled(keys=\"output\", weights=torch.tensor([[[1, 3]], [[3, 1]]]))({\"output\": img})\n torch.testing.assert_allclose(result[\"output\"], expected_value)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_header_revise.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_header_revise.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_nifti_header_revise.py", "file_name": "test_nifti_header_revise.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 40, "span_ids": ["impl", "TestRectifyHeaderSformQform", "TestRectifyHeaderSformQform.test_revise_q", "TestRectifyHeaderSformQform.test_revise_both", "docstring"], "tokens": 299}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport nibabel as nib\nimport numpy as np\n\nfrom monai.data import rectify_header_sform_qform\n\n\nclass TestRectifyHeaderSformQform(unittest.TestCase):\n def test_revise_q(self):\n img = nib.Nifti1Image(np.zeros((10, 10, 10)), np.eye(4))\n img.header.set_zooms((0.1, 0.2, 0.3))\n output = rectify_header_sform_qform(img)\n expected = np.diag([0.1, 0.2, 0.3, 1.0])\n np.testing.assert_allclose(output.affine, expected)\n\n def test_revise_both(self):\n img = nib.Nifti1Image(np.zeros((10, 10, 10)), np.eye(4))\n img.header.set_sform(np.diag([5, 3, 4, 1]))\n img.header.set_qform(np.diag([2, 3, 4, 1]))\n img.header.set_zooms((0.1, 0.2, 0.3))\n output = rectify_header_sform_qform(img)\n expected = np.diag([0.1, 0.2, 0.3, 1.0])\n np.testing.assert_allclose(output.affine, expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_os_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_os_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_nifti_rw.py", "file_name": "test_nifti_rw.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 65, "span_ids": ["docstring"], "tokens": 533}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport tempfile\nimport unittest\n\nimport nibabel as nib\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.data import write_nifti\nfrom monai.transforms import LoadImage, Orientation, Spacing\nfrom tests.utils import make_nifti_image\n\nTEST_IMAGE = np.arange(24).reshape((2, 4, 3))\nTEST_AFFINE = np.array(\n [[-5.3, 0.0, 0.0, 102.01], [0.0, 0.52, 2.17, -7.50], [-0.0, 1.98, -0.26, -23.12], [0.0, 0.0, 0.0, 
1.0]]\n)\n\nTEST_CASES = [\n [\n TEST_IMAGE,\n TEST_AFFINE,\n dict(reader=\"NibabelReader\", image_only=False, as_closest_canonical=True),\n np.arange(24).reshape((2, 4, 3)),\n ],\n [\n TEST_IMAGE,\n TEST_AFFINE,\n dict(reader=\"NibabelReader\", image_only=True, as_closest_canonical=True),\n np.array(\n [\n [[12.0, 15.0, 18.0, 21.0], [13.0, 16.0, 19.0, 22.0], [14.0, 17.0, 20.0, 23.0]],\n [[0.0, 3.0, 6.0, 9.0], [1.0, 4.0, 7.0, 10.0], [2.0, 5.0, 8.0, 11.0]],\n ]\n ),\n ],\n [\n TEST_IMAGE,\n TEST_AFFINE,\n dict(reader=\"NibabelReader\", image_only=True, as_closest_canonical=False),\n np.arange(24).reshape((2, 4, 3)),\n ],\n [\n TEST_IMAGE,\n TEST_AFFINE,\n dict(reader=\"NibabelReader\", image_only=False, as_closest_canonical=False),\n np.arange(24).reshape((2, 4, 3)),\n ],\n [\n TEST_IMAGE,\n None,\n dict(reader=\"NibabelReader\", image_only=False, as_closest_canonical=False),\n np.arange(24).reshape((2, 4, 3)),\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead_TestNiftiLoadRead.test_orientation.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead_TestNiftiLoadRead.test_orientation.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_nifti_rw.py", "file_name": "test_nifti_rw.py", "file_type": "text/x-python", "category": "test", "start_line": 68, "end_line": 97, "span_ids": ["TestNiftiLoadRead", "TestNiftiLoadRead.test_orientation"], "tokens": 245}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNiftiLoadRead(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_orientation(self, array, affine, reader_param, expected):\n test_image = make_nifti_image(array, affine)\n\n # read test cases\n loader = LoadImage(**reader_param)\n load_result = loader(test_image)\n if isinstance(load_result, tuple):\n data_array, header = load_result\n else:\n data_array = load_result\n header = None\n if os.path.exists(test_image):\n os.remove(test_image)\n\n # write test cases\n if header is not None:\n write_nifti(data_array, test_image, header[\"affine\"], header.get(\"original_affine\", None))\n elif affine is not None:\n write_nifti(data_array, test_image, affine)\n saved = nib.load(test_image)\n saved_affine = saved.affine\n saved_data = saved.get_fdata()\n if os.path.exists(test_image):\n os.remove(test_image)\n\n if affine is not None:\n np.testing.assert_allclose(saved_affine, affine)\n np.testing.assert_allclose(saved_data, expected)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_consistency_TestNiftiLoadRead.test_consistency.None_2.os_remove_test_image_": 
{"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_consistency_TestNiftiLoadRead.test_consistency.None_2.os_remove_test_image_", "embedding": null, "metadata": {"file_path": "tests/test_nifti_rw.py", "file_name": "test_nifti_rw.py", "file_type": "text/x-python", "category": "test", "start_line": 99, "end_line": 126, "span_ids": ["TestNiftiLoadRead.test_consistency"], "tokens": 377}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNiftiLoadRead(unittest.TestCase):\n\n def test_consistency(self):\n np.set_printoptions(suppress=True, precision=3)\n test_image = make_nifti_image(np.arange(64).reshape(1, 8, 8), np.diag([1.5, 1.5, 1.5, 1]))\n data, header = LoadImage(reader=\"NibabelReader\", as_closest_canonical=False)(test_image)\n data, original_affine, new_affine = Spacing([0.8, 0.8, 0.8])(data[None], header[\"affine\"], mode=\"nearest\")\n data, _, new_affine = Orientation(\"ILP\")(data, new_affine)\n if os.path.exists(test_image):\n os.remove(test_image)\n write_nifti(data[0], test_image, new_affine, original_affine, mode=\"nearest\", padding_mode=\"border\")\n saved = nib.load(test_image)\n saved_data = saved.get_fdata()\n np.testing.assert_allclose(saved_data, np.arange(64).reshape(1, 8, 8), atol=1e-7)\n if os.path.exists(test_image):\n os.remove(test_image)\n write_nifti(\n data[0],\n test_image,\n new_affine,\n original_affine,\n mode=\"nearest\",\n padding_mode=\"border\",\n output_spatial_shape=(1, 8, 8),\n )\n saved = nib.load(test_image)\n saved_data = saved.get_fdata()\n np.testing.assert_allclose(saved_data, np.arange(64).reshape(1, 8, 8), atol=1e-7)\n if os.path.exists(test_image):\n os.remove(test_image)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_write_5d_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_write_5d_", "embedding": null, "metadata": {"file_path": "tests/test_nifti_rw.py", "file_name": "test_nifti_rw.py", "file_type": "text/x-python", "category": "test", "start_line": 156, "end_line": 178, "span_ids": ["TestNiftiLoadRead.test_write_5d", "impl:7"], "tokens": 399}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNiftiLoadRead(unittest.TestCase):\n\n def test_write_5d(self):\n with tempfile.TemporaryDirectory() as out_dir:\n image_name = os.path.join(out_dir, \"test.nii.gz\")\n img = np.arange(12).reshape((1, 1, 3, 2, 2))\n write_nifti(img, image_name, affine=np.diag([1]), target_affine=np.diag([1.4]))\n out = nib.load(image_name)\n np.testing.assert_allclose(\n out.get_fdata(),\n 
np.array([[[[[0.0, 1.0], [2.0, 3.0]], [[4.0, 5.0], [6.0, 7.0]], [[8.0, 9.0], [10.0, 11.0]]]]]),\n )\n np.testing.assert_allclose(out.affine, np.diag([1.4, 1, 1, 1]))\n\n image_name = os.path.join(out_dir, \"test1.nii.gz\")\n img = np.arange(10).reshape((1, 1, 5, 1, 2))\n write_nifti(img, image_name, affine=np.diag([1, 1, 1, 3, 3]), target_affine=np.diag([1.4, 2.0, 2, 3, 5]))\n out = nib.load(image_name)\n np.testing.assert_allclose(out.get_fdata(), np.array([[[[[0.0, 1.0]], [[4.0, 5.0]], [[8.0, 9.0]]]]]))\n np.testing.assert_allclose(out.affine, np.diag([1.4, 2, 2, 1]))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensity.py_TestNormalizeIntensity_TestNormalizeIntensity.test_nonzero.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensity.py_TestNormalizeIntensity_TestNormalizeIntensity.test_nonzero.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_normalize_intensity.py", "file_name": "test_normalize_intensity.py", "file_type": "text/x-python", "category": "test", "start_line": 58, "end_line": 69, "span_ids": ["TestNormalizeIntensity.test_default", "TestNormalizeIntensity", "TestNormalizeIntensity.test_nonzero"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNormalizeIntensity(NumpyImageTestCase2D):\n def test_default(self):\n normalizer = NormalizeIntensity()\n normalized = normalizer(self.imt.copy())\n self.assertTrue(normalized.dtype == np.float32)\n expected = (self.imt - np.mean(self.imt)) / np.std(self.imt)\n np.testing.assert_allclose(normalized, expected, rtol=1e-5)\n\n @parameterized.expand(TEST_CASES)\n def test_nonzero(self, input_param, input_data, expected_data):\n normalizer = NormalizeIntensity(**input_param)\n np.testing.assert_allclose(expected_data, normalizer(input_data))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensityd.py_unittest_TEST_CASE_3._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensityd.py_unittest_TEST_CASE_3._", "embedding": null, "metadata": {"file_path": "tests/test_normalize_intensityd.py", "file_name": "test_normalize_intensityd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 41, "span_ids": ["docstring"], "tokens": 306}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import NormalizeIntensityd\nfrom tests.utils import NumpyImageTestCase2D\n\nTEST_CASE_1 = [\n {\"keys\": [\"img\"], \"nonzero\": True},\n {\"img\": np.array([0.0, 3.0, 0.0, 4.0])},\n np.array([0.0, -1.0, 0.0, 1.0]),\n]\n\nTEST_CASE_2 = [\n {\n \"keys\": [\"img\"],\n \"subtrahend\": np.array([3.5, 3.5, 3.5, 3.5]),\n \"divisor\": np.array([0.5, 0.5, 0.5, 0.5]),\n \"nonzero\": True,\n },\n {\"img\": np.array([0.0, 3.0, 0.0, 4.0])},\n np.array([0.0, -1.0, 0.0, 1.0]),\n]\n\nTEST_CASE_3 = [\n {\"keys\": [\"img\"], \"nonzero\": True},\n {\"img\": np.array([0.0, 0.0, 0.0, 0.0])},\n np.array([0.0, 0.0, 0.0, 0.0]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensityd.py_TestNormalizeIntensityd_TestNormalizeIntensityd.test_nonzero.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensityd.py_TestNormalizeIntensityd_TestNormalizeIntensityd.test_nonzero.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_normalize_intensityd.py", "file_name": "test_normalize_intensityd.py", "file_type": "text/x-python", "category": "test", "start_line": 44, "end_line": 55, "span_ids": ["TestNormalizeIntensityd.test_nonzero", "TestNormalizeIntensityd", "TestNormalizeIntensityd.test_image_normalize_intensityd"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNormalizeIntensityd(NumpyImageTestCase2D):\n def test_image_normalize_intensityd(self):\n key = \"img\"\n normalizer = NormalizeIntensityd(keys=[key])\n normalized = normalizer({key: self.imt})\n expected = (self.imt - np.mean(self.imt)) / np.std(self.imt)\n np.testing.assert_allclose(normalized[key], expected, rtol=1e-5)\n\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_nonzero(self, input_param, input_data, expected_data):\n normalizer = NormalizeIntensityd(**input_param)\n np.testing.assert_allclose(expected_data, normalizer(input_data)[\"img\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensityd.py_TestNormalizeIntensityd.test_channel_wise_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensityd.py_TestNormalizeIntensityd.test_channel_wise_", "embedding": null, "metadata": {"file_path": "tests/test_normalize_intensityd.py", "file_name": "test_normalize_intensityd.py", "file_type": "text/x-python", "category": "test", "start_line": 56, "end_line": 66, "span_ids": 
["TestNormalizeIntensityd.test_channel_wise", "impl:7"], "tokens": 166}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNormalizeIntensityd(NumpyImageTestCase2D):\n\n def test_channel_wise(self):\n key = \"img\"\n normalizer = NormalizeIntensityd(keys=key, nonzero=True, channel_wise=True)\n input_data = {key: np.array([[0.0, 3.0, 0.0, 4.0], [0.0, 4.0, 0.0, 5.0]])}\n expected = np.array([[0.0, -1.0, 0.0, 1.0], [0.0, -1.0, 0.0, 1.0]])\n np.testing.assert_allclose(expected, normalizer(input_data)[key])\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optional_import.py_unittest_TestOptionalImport.test_import_wrong_number.None_2.print_my_module_randint_1": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optional_import.py_unittest_TestOptionalImport.test_import_wrong_number.None_2.print_my_module_randint_1", "embedding": null, "metadata": {"file_path": "tests/test_optional_import.py", "file_name": "test_optional_import.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 45, "span_ids": ["TestOptionalImport", "TestOptionalImport.test_import_valid", "TestOptionalImport.test_import_wrong_number", "docstring", "TestOptionalImport.test_default"], "tokens": 265}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nfrom monai.utils import OptionalImportError, exact_version, optional_import\n\n\nclass TestOptionalImport(unittest.TestCase):\n def test_default(self):\n my_module, flag = optional_import(\"not_a_module\")\n self.assertFalse(flag)\n with self.assertRaises(OptionalImportError):\n my_module.test\n\n my_module, flag = optional_import(\"torch.randint\")\n with self.assertRaises(OptionalImportError):\n self.assertFalse(flag)\n print(my_module.test)\n\n def test_import_valid(self):\n my_module, flag = optional_import(\"torch\")\n self.assertTrue(flag)\n print(my_module.randint(1, 2, (1, 2)))\n\n def test_import_wrong_number(self):\n my_module, flag = optional_import(\"torch\", \"42\")\n with self.assertRaisesRegex(OptionalImportError, \"version\"):\n my_module.nn\n self.assertFalse(flag)\n with self.assertRaisesRegex(OptionalImportError, \"version\"):\n my_module.randint(1, 2, (1, 2))\n with self.assertRaisesRegex(ValueError, \"invalid literal\"):\n my_module, flag = optional_import(\"torch\", \"test\") # version should be number.number\n my_module.nn\n self.assertTrue(flag)\n print(my_module.randint(1, 2, (1, 2)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": 
"TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optional_import.py_TestOptionalImport.test_import_good_number_TestOptionalImport.test_import_good_number.None_5": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optional_import.py_TestOptionalImport.test_import_good_number_TestOptionalImport.test_import_good_number.None_5", "embedding": null, "metadata": {"file_path": "tests/test_optional_import.py", "file_name": "test_optional_import.py", "file_type": "text/x-python", "category": "test", "start_line": 47, "end_line": 61, "span_ids": ["TestOptionalImport.test_import_good_number"], "tokens": 148}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOptionalImport(unittest.TestCase):\n\n def test_import_good_number(self):\n my_module, flag = optional_import(\"torch\", \"0\")\n my_module.nn\n self.assertTrue(flag)\n print(my_module.randint(1, 2, (1, 2)))\n\n my_module, flag = optional_import(\"torch\", \"0.0.0.1\")\n my_module.nn\n self.assertTrue(flag)\n print(my_module.randint(1, 2, (1, 2)))\n\n my_module, flag = optional_import(\"torch\", \"1.1.0\")\n my_module.nn\n self.assertTrue(flag)\n print(my_module.randint(1, 2, (1, 2)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optional_import.py_TestOptionalImport.test_import_exact_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optional_import.py_TestOptionalImport.test_import_exact_", "embedding": null, "metadata": {"file_path": "tests/test_optional_import.py", "file_name": "test_optional_import.py", "file_type": "text/x-python", "category": "test", "start_line": 63, "end_line": 89, "span_ids": ["TestOptionalImport.test_import_exact", "TestOptionalImport.test_additional", "impl", "TestOptionalImport.test_import_method"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOptionalImport(unittest.TestCase):\n\n def test_import_exact(self):\n my_module, flag = optional_import(\"torch\", \"0\", exact_version)\n with self.assertRaisesRegex(OptionalImportError, \"exact_version\"):\n my_module.nn\n self.assertFalse(flag)\n with self.assertRaisesRegex(OptionalImportError, \"exact_version\"):\n my_module.randint(1, 2, (1, 2))\n\n def test_import_method(self):\n nn, flag = optional_import(\"torch\", \"1.1\", name=\"nn\")\n self.assertTrue(flag)\n print(nn.functional)\n\n def test_additional(self):\n test_args = {\"a\": \"test\", \"b\": \"test\"}\n\n def versioning(module, ver, a):\n self.assertEqual(a, test_args)\n return True\n\n nn, flag = optional_import(\"torch\", 
\"1.1\", version_checker=versioning, name=\"nn\", version_args=test_args)\n self.assertTrue(flag)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientation.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientation.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_orientation.py", "file_name": "test_orientation.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 102, "span_ids": ["docstring"], "tokens": 1208}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport nibabel as nib\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import Orientation, create_rotate, create_translate\n\nTEST_CASES = [\n [\n {\"axcodes\": \"RAS\"},\n np.arange(12).reshape((2, 1, 2, 3)),\n {\"affine\": np.eye(4)},\n np.arange(12).reshape((2, 1, 2, 3)),\n \"RAS\",\n ],\n [\n {\"axcodes\": \"ALS\"},\n np.arange(12).reshape((2, 1, 2, 3)),\n {\"affine\": np.diag([-1, -1, 1, 1])},\n np.array([[[[3, 4, 5]], [[0, 1, 2]]], [[[9, 10, 11]], [[6, 7, 8]]]]),\n \"ALS\",\n ],\n [\n {\"axcodes\": \"RAS\"},\n np.arange(12).reshape((2, 1, 2, 3)),\n {\"affine\": np.diag([-1, -1, 1, 1])},\n np.array([[[[3, 4, 5], [0, 1, 2]]], [[[9, 10, 11], [6, 7, 8]]]]),\n \"RAS\",\n ],\n [\n {\"axcodes\": \"AL\"},\n np.arange(6).reshape((2, 1, 3)),\n {\"affine\": np.eye(3)},\n np.array([[[0], [1], [2]], [[3], [4], [5]]]),\n \"AL\",\n ],\n [{\"axcodes\": \"L\"}, np.arange(6).reshape((2, 3)), {\"affine\": np.eye(2)}, np.array([[2, 1, 0], [5, 4, 3]]), \"L\"],\n [{\"axcodes\": \"L\"}, np.arange(6).reshape((2, 3)), {\"affine\": np.eye(2)}, np.array([[2, 1, 0], [5, 4, 3]]), \"L\"],\n [{\"axcodes\": \"L\"}, np.arange(6).reshape((2, 3)), {\"affine\": np.diag([-1, 1])}, np.arange(6).reshape((2, 3)), \"L\"],\n [\n {\"axcodes\": \"LPS\"},\n np.arange(12).reshape((2, 1, 2, 3)),\n {\n \"affine\": create_translate(3, (10, 20, 30))\n @ create_rotate(3, (np.pi / 2, np.pi / 2, np.pi / 4))\n @ np.diag([-1, 1, 1, 1])\n },\n np.array([[[[2, 5]], [[1, 4]], [[0, 3]]], [[[8, 11]], [[7, 10]], [[6, 9]]]]),\n \"LPS\",\n ],\n [\n {\"as_closest_canonical\": True},\n np.arange(12).reshape((2, 1, 2, 3)),\n {\n \"affine\": create_translate(3, (10, 20, 30))\n @ create_rotate(3, (np.pi / 2, np.pi / 2, np.pi / 4))\n @ np.diag([-1, 1, 1, 1])\n },\n np.array([[[[0, 3]], [[1, 4]], [[2, 5]]], [[[6, 9]], [[7, 10]], [[8, 11]]]]),\n \"RAS\",\n ],\n [\n {\"as_closest_canonical\": True},\n np.arange(6).reshape((1, 2, 3)),\n {\"affine\": create_translate(2, (10, 20)) @ create_rotate(2, (np.pi / 3)) @ np.diag([-1, -0.2, 1])},\n np.array([[[3, 0], [4, 1], [5, 2]]]),\n \"RA\",\n ],\n [\n {\"axcodes\": \"LP\"},\n np.arange(6).reshape((1, 2, 3)),\n {\"affine\": create_translate(2, (10, 20)) @ create_rotate(2, (np.pi / 3)) @ np.diag([-1, -0.2, 1])},\n np.array([[[2, 5], [1, 4], [0, 
3]]]),\n \"LP\",\n ],\n [\n {\"axcodes\": \"LPID\", \"labels\": tuple(zip(\"LPIC\", \"RASD\"))},\n np.zeros((1, 2, 3, 4, 5)),\n {\"affine\": np.diag([-1, -0.2, -1, 1, 1])},\n np.zeros((1, 2, 3, 4, 5)),\n \"LPID\",\n ],\n [\n {\"as_closest_canonical\": True, \"labels\": tuple(zip(\"LPIC\", \"RASD\"))},\n np.zeros((1, 2, 3, 4, 5)),\n {\"affine\": np.diag([-1, -0.2, -1, 1, 1])},\n np.zeros((1, 2, 3, 4, 5)),\n \"RASD\",\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientation.py_ILL_CASES_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientation.py_ILL_CASES_", "embedding": null, "metadata": {"file_path": "tests/test_orientation.py", "file_name": "test_orientation.py", "file_type": "text/x-python", "category": "test", "start_line": 104, "end_line": 131, "span_ids": ["impl:3", "TestOrientationCase.test_bad_params", "TestOrientationCase", "TestOrientationCase.test_ornt", "impl:5"], "tokens": 265}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "ILL_CASES = [\n # no axcodes or as_cloest_canonical\n [{}, np.arange(6).reshape((2, 3)), \"L\"],\n # too short axcodes\n [{\"axcodes\": \"RA\"}, np.arange(12).reshape((2, 1, 2, 3)), {\"affine\": np.eye(4)}],\n]\n\n\nclass TestOrientationCase(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_ornt(self, init_param, img, data_param, expected_data, expected_code):\n ornt = Orientation(**init_param)\n res = ornt(img, **data_param)\n np.testing.assert_allclose(res[0], expected_data)\n original_affine = data_param[\"affine\"]\n np.testing.assert_allclose(original_affine, res[1])\n new_code = nib.orientations.aff2axcodes(res[2], labels=ornt.labels)\n self.assertEqual(\"\".join(new_code), expected_code)\n\n @parameterized.expand(ILL_CASES)\n def test_bad_params(self, init_param, img, data_param):\n with self.assertRaises(ValueError):\n Orientation(**init_param)(img, **data_param)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_unittest_TestOrientationdCase.test_orntd.self_assertEqual_code_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_unittest_TestOrientationdCase.test_orntd.self_assertEqual_code_", "embedding": null, "metadata": {"file_path": "tests/test_orientationd.py", "file_name": "test_orientationd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 27, "span_ids": ["TestOrientationdCase.test_orntd", "TestOrientationdCase", "docstring"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport nibabel as nib\nimport numpy as np\n\nfrom monai.transforms import Orientationd\n\n\nclass TestOrientationdCase(unittest.TestCase):\n def test_orntd(self):\n data = {\"seg\": np.ones((2, 1, 2, 3)), \"seg_meta_dict\": {\"affine\": np.eye(4)}}\n ornt = Orientationd(keys=\"seg\", axcodes=\"RAS\")\n res = ornt(data)\n np.testing.assert_allclose(res[\"seg\"].shape, (2, 1, 2, 3))\n code = nib.aff2axcodes(res[\"seg_meta_dict\"][\"affine\"], ornt.ornt_transform.labels)\n self.assertEqual(code, (\"R\", \"A\", \"S\"))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_TestOrientationdCase.test_orntd_3d_TestOrientationdCase.test_orntd_3d.None_3": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_TestOrientationdCase.test_orntd_3d_TestOrientationdCase.test_orntd_3d.None_3", "embedding": null, "metadata": {"file_path": "tests/test_orientationd.py", "file_name": "test_orientationd.py", "file_type": "text/x-python", "category": "test", "start_line": 29, "end_line": 43, "span_ids": ["TestOrientationdCase.test_orntd_3d"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOrientationdCase(unittest.TestCase):\n\n def test_orntd_3d(self):\n data = {\n \"seg\": np.ones((2, 1, 2, 3)),\n \"img\": np.ones((2, 1, 2, 3)),\n \"seg_meta_dict\": {\"affine\": np.eye(4)},\n \"img_meta_dict\": {\"affine\": np.eye(4)},\n }\n ornt = Orientationd(keys=(\"img\", \"seg\"), axcodes=\"PLI\")\n res = ornt(data)\n np.testing.assert_allclose(res[\"img\"].shape, (2, 2, 1, 3))\n np.testing.assert_allclose(res[\"seg\"].shape, (2, 2, 1, 3))\n code = nib.aff2axcodes(res[\"seg_meta_dict\"][\"affine\"], ornt.ornt_transform.labels)\n self.assertEqual(code, (\"P\", \"L\", \"I\"))\n code = nib.aff2axcodes(res[\"img_meta_dict\"][\"affine\"], ornt.ornt_transform.labels)\n self.assertEqual(code, (\"P\", \"L\", \"I\"))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_TestOrientationdCase.test_orntd_2d_TestOrientationdCase.test_orntd_2d.None_2": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_TestOrientationdCase.test_orntd_2d_TestOrientationdCase.test_orntd_2d.None_2", "embedding": null, "metadata": {"file_path": "tests/test_orientationd.py", "file_name": "test_orientationd.py", "file_type": "text/x-python", "category": "test", "start_line": 45, "end_line": 58, "span_ids": 
["TestOrientationdCase.test_orntd_2d"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOrientationdCase(unittest.TestCase):\n\n def test_orntd_2d(self):\n data = {\n \"seg\": np.ones((2, 1, 3)),\n \"img\": np.ones((2, 1, 3)),\n \"seg_meta_dict\": {\"affine\": np.eye(4)},\n \"img_meta_dict\": {\"affine\": np.eye(4)},\n }\n ornt = Orientationd(keys=(\"img\", \"seg\"), axcodes=\"PLI\")\n res = ornt(data)\n np.testing.assert_allclose(res[\"img\"].shape, (2, 3, 1))\n code = nib.aff2axcodes(res[\"seg_meta_dict\"][\"affine\"], ornt.ornt_transform.labels)\n self.assertEqual(code, (\"P\", \"L\", \"S\"))\n code = nib.aff2axcodes(res[\"img_meta_dict\"][\"affine\"], ornt.ornt_transform.labels)\n self.assertEqual(code, (\"P\", \"L\", \"S\"))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_TestOrientationdCase.test_orntd_1d_TestOrientationdCase.test_orntd_1d.None_2": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_TestOrientationdCase.test_orntd_1d_TestOrientationdCase.test_orntd_1d.None_2", "embedding": null, "metadata": {"file_path": "tests/test_orientationd.py", "file_name": "test_orientationd.py", "file_type": "text/x-python", "category": "test", "start_line": 60, "end_line": 73, "span_ids": ["TestOrientationdCase.test_orntd_1d"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOrientationdCase(unittest.TestCase):\n\n def test_orntd_1d(self):\n data = {\n \"seg\": np.ones((2, 3)),\n \"img\": np.ones((2, 3)),\n \"seg_meta_dict\": {\"affine\": np.eye(4)},\n \"img_meta_dict\": {\"affine\": np.eye(4)},\n }\n ornt = Orientationd(keys=(\"img\", \"seg\"), axcodes=\"L\")\n res = ornt(data)\n np.testing.assert_allclose(res[\"img\"].shape, (2, 3))\n code = nib.aff2axcodes(res[\"seg_meta_dict\"][\"affine\"], ornt.ornt_transform.labels)\n self.assertEqual(code, (\"L\", \"A\", \"S\"))\n code = nib.aff2axcodes(res[\"img_meta_dict\"][\"affine\"], ornt.ornt_transform.labels)\n self.assertEqual(code, (\"L\", \"A\", \"S\"))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_TestOrientationdCase.test_orntd_canonical_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_orientationd.py_TestOrientationdCase.test_orntd_canonical_", "embedding": null, "metadata": {"file_path": "tests/test_orientationd.py", 
"file_name": "test_orientationd.py", "file_type": "text/x-python", "category": "test", "start_line": 75, "end_line": 94, "span_ids": ["impl", "TestOrientationdCase.test_orntd_canonical"], "tokens": 256}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestOrientationdCase(unittest.TestCase):\n\n def test_orntd_canonical(self):\n data = {\n \"seg\": np.ones((2, 1, 2, 3)),\n \"img\": np.ones((2, 1, 2, 3)),\n \"seg_meta_dict\": {\"affine\": np.eye(4)},\n \"img_meta_dict\": {\"affine\": np.eye(4)},\n }\n ornt = Orientationd(keys=(\"img\", \"seg\"), as_closest_canonical=True)\n res = ornt(data)\n np.testing.assert_allclose(res[\"img\"].shape, (2, 1, 2, 3))\n np.testing.assert_allclose(res[\"seg\"].shape, (2, 1, 2, 3))\n code = nib.aff2axcodes(res[\"seg_meta_dict\"][\"affine\"], ornt.ornt_transform.labels)\n self.assertEqual(code, (\"R\", \"A\", \"S\"))\n code = nib.aff2axcodes(res[\"img_meta_dict\"][\"affine\"], ornt.ornt_transform.labels)\n self.assertEqual(code, (\"R\", \"A\", \"S\"))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_plot_2d_or_3d_image.py_glob_TEST_CASE_5._1_3_10_10_10_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_plot_2d_or_3d_image.py_glob_TEST_CASE_5._1_3_10_10_10_", "embedding": null, "metadata": {"file_path": "tests/test_plot_2d_or_3d_image.py", "file_name": "test_plot_2d_or_3d_image.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 30, "span_ids": ["docstring"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import glob\nimport tempfile\nimport unittest\n\nimport torch\nfrom parameterized import parameterized\nfrom torch.utils.tensorboard import SummaryWriter\n\nfrom monai.visualize import plot_2d_or_3d_image\n\nTEST_CASE_1 = [(1, 1, 10, 10)]\n\nTEST_CASE_2 = [(1, 3, 10, 10)]\n\nTEST_CASE_3 = [(1, 4, 10, 10)]\n\nTEST_CASE_4 = [(1, 1, 10, 10, 10)]\n\nTEST_CASE_5 = [(1, 3, 10, 10, 10)]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_plot_2d_or_3d_image.py_TestPlot2dOr3dImage_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_plot_2d_or_3d_image.py_TestPlot2dOr3dImage_", "embedding": null, "metadata": {"file_path": "tests/test_plot_2d_or_3d_image.py", "file_name": "test_plot_2d_or_3d_image.py", "file_type": "text/x-python", "category": "test", 
"start_line": 33, "end_line": 46, "span_ids": ["impl:11", "TestPlot2dOr3dImage", "TestPlot2dOr3dImage.test_tb_image_shape"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPlot2dOr3dImage(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5])\n def test_tb_image_shape(self, shape):\n with tempfile.TemporaryDirectory() as tempdir:\n writer = SummaryWriter(log_dir=tempdir)\n plot_2d_or_3d_image(torch.zeros(shape), 0, writer)\n writer.flush()\n writer.close()\n self.assertTrue(len(glob.glob(tempdir)) > 0)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_png_rw.py_os_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_png_rw.py_os_", "embedding": null, "metadata": {"file_path": "tests/test_png_rw.py", "file_name": "test_png_rw.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 79, "span_ids": ["TestPngWrite.test_write_output_shape", "impl", "TestPngWrite.test_write_gray", "docstring", "TestPngWrite.test_write_gray_1height", "TestPngWrite.test_write_2channels", "TestPngWrite", "TestPngWrite.test_write_rgb", "TestPngWrite.test_write_gray_1channel"], "tokens": 629}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport tempfile\nimport unittest\n\nimport numpy as np\nfrom PIL import Image\n\nfrom monai.data import write_png\n\n\nclass TestPngWrite(unittest.TestCase):\n def test_write_gray(self):\n with tempfile.TemporaryDirectory() as out_dir:\n image_name = os.path.join(out_dir, \"test.png\")\n img = np.random.rand(2, 3)\n img_save_val = (255 * img).astype(np.uint8)\n write_png(img, image_name, scale=255)\n out = np.asarray(Image.open(image_name))\n np.testing.assert_allclose(out, img_save_val)\n\n def test_write_gray_1height(self):\n with tempfile.TemporaryDirectory() as out_dir:\n image_name = os.path.join(out_dir, \"test.png\")\n img = np.random.rand(1, 3)\n img_save_val = (65535 * img).astype(np.uint16)\n write_png(img, image_name, scale=65535)\n out = np.asarray(Image.open(image_name))\n np.testing.assert_allclose(out, img_save_val)\n\n def test_write_gray_1channel(self):\n with tempfile.TemporaryDirectory() as out_dir:\n image_name = os.path.join(out_dir, \"test.png\")\n img = np.random.rand(2, 3, 1)\n img_save_val = (255 * img).astype(np.uint8).squeeze(2)\n write_png(img, image_name, scale=255)\n out = np.asarray(Image.open(image_name))\n np.testing.assert_allclose(out, img_save_val)\n\n def test_write_rgb(self):\n with tempfile.TemporaryDirectory() as out_dir:\n image_name = os.path.join(out_dir, \"test.png\")\n img = 
np.random.rand(2, 3, 3)\n img_save_val = (255 * img).astype(np.uint8)\n write_png(img, image_name, scale=255)\n out = np.asarray(Image.open(image_name))\n np.testing.assert_allclose(out, img_save_val)\n\n def test_write_2channels(self):\n with tempfile.TemporaryDirectory() as out_dir:\n image_name = os.path.join(out_dir, \"test.png\")\n img = np.random.rand(2, 3, 2)\n img_save_val = (255 * img).astype(np.uint8)\n write_png(img, image_name, scale=255)\n out = np.asarray(Image.open(image_name))\n np.testing.assert_allclose(out, img_save_val)\n\n def test_write_output_shape(self):\n with tempfile.TemporaryDirectory() as out_dir:\n image_name = os.path.join(out_dir, \"test.png\")\n img = np.random.rand(2, 2, 3)\n write_png(img, image_name, (4, 4), scale=255)\n out = np.asarray(Image.open(image_name))\n np.testing.assert_allclose(out.shape, (4, 4, 3))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_query_memory.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_query_memory.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_query_memory.py", "file_name": "test_query_memory.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 27, "span_ids": ["impl", "TestQueryMemory", "TestQueryMemory.test_output_str", "docstring"], "tokens": 75}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nfrom tests.utils import query_memory\n\n\nclass TestQueryMemory(unittest.TestCase):\n def test_output_str(self):\n self.assertTrue(isinstance(query_memory(2), str))\n all_device = query_memory(-1)\n self.assertTrue(isinstance(all_device, str))\n self.assertEqual(query_memory(\"test\"), \"\")\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_adjust_contrast.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_adjust_contrast.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rand_adjust_contrast.py", "file_name": "test_rand_adjust_contrast.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 41, "span_ids": ["TestRandAdjustContrast", "TestRandAdjustContrast.test_correct_results", "impl:5", "docstring"], "tokens": 223}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandAdjustContrast\nfrom tests.utils import NumpyImageTestCase2D\n\nTEST_CASE_1 = [(0.5, 4.5)]\n\nTEST_CASE_2 = [1.5]\n\n\nclass TestRandAdjustContrast(NumpyImageTestCase2D):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2])\n def test_correct_results(self, gamma):\n adjuster = RandAdjustContrast(prob=1.0, gamma=gamma)\n result = adjuster(self.imt)\n epsilon = 1e-7\n img_min = self.imt.min()\n img_range = self.imt.max() - img_min\n expected = (\n np.power(((self.imt - img_min) / float(img_range + epsilon)), adjuster.gamma_value) * img_range + img_min\n )\n np.testing.assert_allclose(expected, result, rtol=1e-05)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_adjust_contrastd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_adjust_contrastd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rand_adjust_contrastd.py", "file_name": "test_rand_adjust_contrastd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 41, "span_ids": ["impl:5", "TestRandAdjustContrastd.test_correct_results", "TestRandAdjustContrastd", "docstring"], "tokens": 234}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandAdjustContrastd\nfrom tests.utils import NumpyImageTestCase2D\n\nTEST_CASE_1 = [(0.5, 4.5)]\n\nTEST_CASE_2 = [1.5]\n\n\nclass TestRandAdjustContrastd(NumpyImageTestCase2D):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2])\n def test_correct_results(self, gamma):\n adjuster = RandAdjustContrastd(\"img\", prob=1.0, gamma=gamma)\n result = adjuster({\"img\": self.imt})\n epsilon = 1e-7\n img_min = self.imt.min()\n img_range = self.imt.max() - img_min\n expected = (\n np.power(((self.imt - img_min) / float(img_range + epsilon)), adjuster.gamma_value) * img_range + img_min\n )\n np.testing.assert_allclose(expected, result[\"img\"], rtol=1e-05)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affine.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affine.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_rand_affine.py", "file_name": "test_rand_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 68, "span_ids": ["docstring"], "tokens": 
616}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandAffine\n\nTEST_CASES = [\n [\n dict(as_tensor_output=False, device=None),\n {\"img\": torch.arange(27).reshape((3, 3, 3))},\n np.arange(27).reshape((3, 3, 3)),\n ],\n [\n dict(as_tensor_output=False, device=None, spatial_size=-1),\n {\"img\": torch.arange(27).reshape((3, 3, 3))},\n np.arange(27).reshape((3, 3, 3)),\n ],\n [\n dict(as_tensor_output=False, device=None),\n {\"img\": torch.arange(27).reshape((3, 3, 3)), \"spatial_size\": (2, 2)},\n np.array([[[2.0, 3.0], [5.0, 6.0]], [[11.0, 12.0], [14.0, 15.0]], [[20.0, 21.0], [23.0, 24.0]]]),\n ],\n [\n dict(as_tensor_output=True, device=None),\n {\"img\": torch.ones((1, 3, 3, 3)), \"spatial_size\": (2, 2, 2)},\n torch.ones((1, 2, 2, 2)),\n ],\n [\n dict(\n prob=0.9,\n rotate_range=(np.pi / 2,),\n shear_range=[1, 2],\n translate_range=[2, 1],\n as_tensor_output=True,\n padding_mode=\"zeros\",\n spatial_size=(2, 2, 2),\n device=None,\n ),\n {\"img\": torch.ones((1, 3, 3, 3)), \"mode\": \"bilinear\"},\n torch.tensor([[[[0.3658, 1.0000], [1.0000, 1.0000]], [[1.0000, 1.0000], [1.0000, 0.9333]]]]),\n ],\n [\n dict(\n prob=0.9,\n rotate_range=(np.pi / 2,),\n shear_range=[1, 2],\n translate_range=[2, 1],\n scale_range=[0.1, 0.2],\n as_tensor_output=True,\n device=None,\n ),\n {\"img\": torch.arange(64).reshape((1, 8, 8)), \"spatial_size\": (3, 3)},\n torch.tensor([[[18.7362, 15.5820, 12.4278], [27.3988, 24.2446, 21.0904], [36.0614, 32.9072, 29.7530]]]),\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affine.py_TestRandAffine_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affine.py_TestRandAffine_", "embedding": null, "metadata": {"file_path": "tests/test_rand_affine.py", "file_name": "test_rand_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 71, "end_line": 86, "span_ids": ["TestRandAffine", "impl:3", "TestRandAffine.test_rand_affine"], "tokens": 157}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandAffine(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_rand_affine(self, input_param, input_data, expected_val):\n g = RandAffine(**input_param)\n g.set_random_state(123)\n result = g(**input_data)\n self.assertEqual(isinstance(result, torch.Tensor), isinstance(expected_val, torch.Tensor))\n if isinstance(result, torch.Tensor):\n np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4)\n else:\n np.testing.assert_allclose(result, expected_val, rtol=1e-4, 
atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affine_grid.py_unittest_TEST_CASES": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affine_grid.py_unittest_TEST_CASES", "embedding": null, "metadata": {"file_path": "tests/test_rand_affine_grid.py", "file_name": "test_rand_affine_grid.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 181, "span_ids": ["docstring"], "tokens": 36}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandAffineGrid\n\nTEST_CASES =\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affine_grid.py_TestRandAffineGrid_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affine_grid.py_TestRandAffineGrid_", "embedding": null, "metadata": {"file_path": "tests/test_rand_affine_grid.py", "file_name": "test_rand_affine_grid.py", "file_type": "text/x-python", "category": "test", "start_line": 184, "end_line": 199, "span_ids": ["TestRandAffineGrid.test_rand_affine_grid", "impl:3", "TestRandAffineGrid"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandAffineGrid(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_rand_affine_grid(self, input_param, input_data, expected_val):\n g = RandAffineGrid(**input_param)\n g.set_random_state(123)\n result = g(**input_data)\n self.assertEqual(isinstance(result, torch.Tensor), isinstance(expected_val, torch.Tensor))\n if isinstance(result, torch.Tensor):\n np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4)\n else:\n np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affined.py_unittest_TEST_CASES._": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affined.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_rand_affined.py", "file_name": "test_rand_affined.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 138, "span_ids": ["docstring"], "tokens": 1358}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandAffined\nfrom monai.utils import GridSampleMode\n\nTEST_CASES = [\n [\n dict(as_tensor_output=False, device=None, spatial_size=None, keys=(\"img\", \"seg\")),\n {\"img\": torch.arange(27).reshape((3, 3, 3)), \"seg\": torch.arange(27).reshape((3, 3, 3))},\n np.arange(27).reshape((3, 3, 3)),\n ],\n [\n dict(as_tensor_output=False, device=None, spatial_size=(2, 2), keys=(\"img\", \"seg\")),\n {\"img\": torch.ones((3, 3, 3)), \"seg\": torch.ones((3, 3, 3))},\n np.ones((3, 2, 2)),\n ],\n [\n dict(as_tensor_output=True, device=None, spatial_size=(2, 2, 2), keys=(\"img\", \"seg\")),\n {\"img\": torch.ones((1, 3, 3, 3)), \"seg\": torch.ones((1, 3, 3, 3))},\n torch.ones((1, 2, 2, 2)),\n ],\n [\n dict(\n prob=0.9,\n rotate_range=(np.pi / 2,),\n shear_range=[1, 2],\n translate_range=[2, 1],\n as_tensor_output=True,\n spatial_size=(2, 2, 2),\n padding_mode=\"zeros\",\n device=None,\n keys=(\"img\", \"seg\"),\n mode=\"bilinear\",\n ),\n {\"img\": torch.ones((1, 3, 3, 3)), \"seg\": torch.ones((1, 3, 3, 3))},\n torch.tensor([[[[0.3658, 1.0000], [1.0000, 1.0000]], [[1.0000, 1.0000], [1.0000, 0.9333]]]]),\n ],\n [\n dict(\n prob=0.9,\n rotate_range=(np.pi / 2,),\n shear_range=[1, 2],\n translate_range=[2, 1],\n scale_range=[0.1, 0.2],\n as_tensor_output=True,\n spatial_size=(3, 3),\n keys=(\"img\", \"seg\"),\n device=None,\n ),\n {\"img\": torch.arange(64).reshape((1, 8, 8)), \"seg\": torch.arange(64).reshape((1, 8, 8))},\n torch.tensor([[[18.7362, 15.5820, 12.4278], [27.3988, 24.2446, 21.0904], [36.0614, 32.9072, 29.7530]]]),\n ],\n [\n dict(\n prob=0.9,\n mode=(\"bilinear\", \"nearest\"),\n rotate_range=(np.pi / 2,),\n shear_range=[1, 2],\n translate_range=[2, 1],\n scale_range=[0.1, 0.2],\n as_tensor_output=False,\n spatial_size=(3, 3),\n keys=(\"img\", \"seg\"),\n device=torch.device(\"cpu:0\"),\n ),\n {\"img\": torch.arange(64).reshape((1, 8, 8)), \"seg\": torch.arange(64).reshape((1, 8, 8))},\n {\n \"img\": np.array(\n [\n [\n [18.736153, 15.581954, 12.4277525],\n [27.398798, 24.244598, 21.090399],\n [36.061443, 32.90724, 29.753046],\n ]\n ]\n ),\n \"seg\": np.array([[[19.0, 20.0, 12.0], [27.0, 28.0, 20.0], [35.0, 36.0, 29.0]]]),\n },\n ],\n [\n dict(\n prob=0.9,\n rotate_range=(np.pi / 2,),\n shear_range=[1, 2],\n translate_range=[2, 1],\n as_tensor_output=True,\n spatial_size=(2, 2, 2),\n padding_mode=\"zeros\",\n device=None,\n keys=(\"img\", \"seg\"),\n mode=GridSampleMode.BILINEAR,\n ),\n {\"img\": torch.ones((1, 3, 3, 3)), \"seg\": torch.ones((1, 3, 3, 3))},\n torch.tensor([[[[0.3658, 1.0000], [1.0000, 1.0000]], [[1.0000, 1.0000], [1.0000, 0.9333]]]]),\n ],\n [\n dict(\n prob=0.9,\n mode=(GridSampleMode.BILINEAR, GridSampleMode.NEAREST),\n rotate_range=(np.pi / 
2,),\n shear_range=[1, 2],\n translate_range=[2, 1],\n scale_range=[0.1, 0.2],\n as_tensor_output=False,\n spatial_size=(3, 3),\n keys=(\"img\", \"seg\"),\n device=torch.device(\"cpu:0\"),\n ),\n {\"img\": torch.arange(64).reshape((1, 8, 8)), \"seg\": torch.arange(64).reshape((1, 8, 8))},\n {\n \"img\": np.array(\n [\n [\n [18.736153, 15.581954, 12.4277525],\n [27.398798, 24.244598, 21.090399],\n [36.061443, 32.90724, 29.753046],\n ]\n ]\n ),\n \"seg\": np.array([[[19.0, 20.0, 12.0], [27.0, 28.0, 20.0], [35.0, 36.0, 29.0]]]),\n },\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affined.py_TestRandAffined_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_affined.py_TestRandAffined_", "embedding": null, "metadata": {"file_path": "tests/test_rand_affined.py", "file_name": "test_rand_affined.py", "file_type": "text/x-python", "category": "test", "start_line": 141, "end_line": 160, "span_ids": ["TestRandAffined", "TestRandAffined.test_rand_affined", "impl:3"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandAffined(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_rand_affined(self, input_param, input_data, expected_val):\n g = RandAffined(**input_param).set_random_state(123)\n res = g(input_data)\n for key in res:\n result = res[key]\n if \"_transforms\" in key:\n continue\n expected = expected_val[key] if isinstance(expected_val, dict) else expected_val\n self.assertEqual(isinstance(result, torch.Tensor), isinstance(expected, torch.Tensor))\n if isinstance(result, torch.Tensor):\n np.testing.assert_allclose(result.cpu().numpy(), expected.cpu().numpy(), rtol=1e-4, atol=1e-4)\n else:\n np.testing.assert_allclose(result, expected, rtol=1e-4, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_crop_by_pos_neg_label.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_crop_by_pos_neg_label.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rand_crop_by_pos_neg_label.py", "file_name": "test_rand_crop_by_pos_neg_label.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 79, "span_ids": ["TestRandCropByPosNegLabel", "TestRandCropByPosNegLabel.test_type_shape", "impl:7", "docstring"], "tokens": 617}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandCropByPosNegLabel\n\nTEST_CASE_0 = [\n {\n \"label\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"spatial_size\": [2, 2, -1],\n \"pos\": 1,\n \"neg\": 1,\n \"num_samples\": 2,\n \"image\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"image_threshold\": 0,\n },\n {\"img\": np.random.randint(0, 2, size=[3, 3, 3, 3])},\n list,\n (3, 2, 2, 3),\n]\n\nTEST_CASE_1 = [\n {\n \"label\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"spatial_size\": [2, 2, 2],\n \"pos\": 1,\n \"neg\": 1,\n \"num_samples\": 2,\n \"image\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"image_threshold\": 0,\n },\n {\"img\": np.random.randint(0, 2, size=[3, 3, 3, 3])},\n list,\n (3, 2, 2, 2),\n]\n\nTEST_CASE_2 = [\n {\n \"label\": None,\n \"spatial_size\": [2, 2, 2],\n \"pos\": 1,\n \"neg\": 1,\n \"num_samples\": 2,\n \"image\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"image_threshold\": 0,\n },\n {\n \"img\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"label\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"image\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n },\n list,\n (3, 2, 2, 2),\n]\n\n\nclass TestRandCropByPosNegLabel(unittest.TestCase):\n @parameterized.expand([TEST_CASE_0, TEST_CASE_1, TEST_CASE_2])\n def test_type_shape(self, input_param, input_data, expected_type, expected_shape):\n result = RandCropByPosNegLabel(**input_param)(**input_data)\n self.assertIsInstance(result, expected_type)\n self.assertTupleEqual(result[0].shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_crop_by_pos_neg_labeld.py_unittest_TEST_CASE_2._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_crop_by_pos_neg_labeld.py_unittest_TEST_CASE_2._", "embedding": null, "metadata": {"file_path": "tests/test_rand_crop_by_pos_neg_labeld.py", "file_name": "test_rand_crop_by_pos_neg_labeld.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 83, "span_ids": ["impl:5", "docstring"], "tokens": 614}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandCropByPosNegLabeld\n\nTEST_CASE_0 = [\n {\n \"keys\": [\"image\", \"extral\", \"label\"],\n \"label_key\": \"label\",\n \"spatial_size\": [-1, 2, 2],\n \"pos\": 1,\n \"neg\": 1,\n \"num_samples\": 2,\n \"image_key\": None,\n \"image_threshold\": 0,\n },\n {\n \"image\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"extral\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"label\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"affine\": np.eye(3),\n \"shape\": \"CHWD\",\n },\n list,\n (3, 3, 2, 2),\n]\n\nTEST_CASE_1 = [\n {\n \"keys\": [\"image\", 
\"extral\", \"label\"],\n \"label_key\": \"label\",\n \"spatial_size\": [2, 2, 2],\n \"pos\": 1,\n \"neg\": 1,\n \"num_samples\": 2,\n \"image_key\": None,\n \"image_threshold\": 0,\n },\n {\n \"image\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"extral\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"label\": np.random.randint(0, 2, size=[3, 3, 3, 3]),\n \"affine\": np.eye(3),\n \"shape\": \"CHWD\",\n },\n list,\n (3, 2, 2, 2),\n]\n\nTEST_CASE_2 = [\n {\n \"keys\": [\"image\", \"extral\", \"label\"],\n \"label_key\": \"label\",\n \"spatial_size\": [2, 2, 2],\n \"pos\": 1,\n \"neg\": 1,\n \"num_samples\": 2,\n \"image_key\": None,\n \"image_threshold\": 0,\n },\n {\n \"image\": np.zeros([3, 3, 3, 3]) - 1,\n \"extral\": np.zeros([3, 3, 3, 3]),\n \"label\": np.ones([3, 3, 3, 3]),\n \"affine\": np.eye(3),\n \"shape\": \"CHWD\",\n },\n list,\n (3, 2, 2, 2),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_crop_by_pos_neg_labeld.py_TestRandCropByPosNegLabeld_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_crop_by_pos_neg_labeld.py_TestRandCropByPosNegLabeld_", "embedding": null, "metadata": {"file_path": "tests/test_rand_crop_by_pos_neg_labeld.py", "file_name": "test_rand_crop_by_pos_neg_labeld.py", "file_type": "text/x-python", "category": "test", "start_line": 86, "end_line": 100, "span_ids": ["TestRandCropByPosNegLabeld", "TestRandCropByPosNegLabeld.test_type_shape", "impl:7"], "tokens": 161}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandCropByPosNegLabeld(unittest.TestCase):\n @parameterized.expand([TEST_CASE_0, TEST_CASE_1, TEST_CASE_2])\n def test_type_shape(self, input_param, input_data, expected_type, expected_shape):\n result = RandCropByPosNegLabeld(**input_param)(input_data)\n self.assertIsInstance(result, expected_type)\n self.assertTupleEqual(result[0][\"image\"].shape, expected_shape)\n self.assertTupleEqual(result[0][\"extral\"].shape, expected_shape)\n self.assertTupleEqual(result[0][\"label\"].shape, expected_shape)\n for i, item in enumerate(result):\n self.assertEqual(item[\"image_meta_dict\"][\"patch_index\"], i)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_deform_grid.py_unittest_TEST_CASES": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_deform_grid.py_unittest_TEST_CASES", "embedding": null, "metadata": {"file_path": "tests/test_rand_deform_grid.py", "file_name": "test_rand_deform_grid.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 123, "span_ids": ["docstring"], "tokens": 36}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandDeformGrid\n\nTEST_CASES =\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_deform_grid.py_TestRandDeformGrid_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_deform_grid.py_TestRandDeformGrid_", "embedding": null, "metadata": {"file_path": "tests/test_rand_deform_grid.py", "file_name": "test_rand_deform_grid.py", "file_type": "text/x-python", "category": "test", "start_line": 126, "end_line": 141, "span_ids": ["TestRandDeformGrid.test_rand_deform_grid", "impl:3", "TestRandDeformGrid"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandDeformGrid(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_rand_deform_grid(self, input_param, input_data, expected_val):\n g = RandDeformGrid(**input_param)\n g.set_random_state(123)\n result = g(**input_data)\n self.assertEqual(isinstance(result, torch.Tensor), isinstance(expected_val, torch.Tensor))\n if isinstance(result, torch.Tensor):\n np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4)\n else:\n np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elastic_2d.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elastic_2d.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_rand_elastic_2d.py", "file_name": "test_rand_elastic_2d.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 89, "span_ids": ["docstring"], "tokens": 818}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import Rand2DElastic\n\nTEST_CASES = [\n [\n {\"spacing\": 
(0.3, 0.3), \"magnitude_range\": (1.0, 2.0), \"prob\": 0.0, \"as_tensor_output\": False, \"device\": None},\n {\"img\": torch.ones((3, 3, 3)), \"spatial_size\": (2, 2)},\n np.ones((3, 2, 2)),\n ],\n [\n {\"spacing\": (0.3, 0.3), \"magnitude_range\": (1.0, 2.0), \"prob\": 0.0, \"as_tensor_output\": False, \"device\": None},\n {\"img\": torch.arange(27).reshape((3, 3, 3))},\n np.arange(27).reshape((3, 3, 3)),\n ],\n [\n {\n \"spacing\": (0.3, 0.3),\n \"magnitude_range\": (1.0, 2.0),\n \"prob\": 0.9,\n \"as_tensor_output\": False,\n \"device\": None,\n \"padding_mode\": \"zeros\",\n },\n {\"img\": torch.ones((3, 3, 3)), \"spatial_size\": (2, 2), \"mode\": \"bilinear\"},\n np.array(\n [\n [[0.45531988, 0.0], [0.0, 0.71558857]],\n [[0.45531988, 0.0], [0.0, 0.71558857]],\n [[0.45531988, 0.0], [0.0, 0.71558857]],\n ]\n ),\n ],\n [\n {\n \"spacing\": (1.0, 1.0),\n \"magnitude_range\": (1.0, 1.0),\n \"scale_range\": [1.2, 2.2],\n \"prob\": 0.9,\n \"padding_mode\": \"border\",\n \"as_tensor_output\": True,\n \"device\": None,\n \"spatial_size\": (2, 2),\n },\n {\"img\": torch.arange(27).reshape((3, 3, 3))},\n torch.tensor(\n [\n [[3.0793, 2.6141], [4.0568, 5.9978]],\n [[12.0793, 11.6141], [13.0568, 14.9978]],\n [[21.0793, 20.6141], [22.0568, 23.9978]],\n ]\n ),\n ],\n [\n {\n \"spacing\": (0.3, 0.3),\n \"magnitude_range\": (0.1, 0.2),\n \"translate_range\": [-0.01, 0.01],\n \"scale_range\": [0.01, 0.02],\n \"prob\": 0.9,\n \"as_tensor_output\": False,\n \"device\": \"cuda\" if torch.cuda.is_available() else \"cpu\",\n \"spatial_size\": (2, 2),\n },\n {\"img\": torch.arange(27).reshape((3, 3, 3))},\n np.array(\n [\n [[1.3584113, 1.9251312], [5.626623, 6.642721]],\n [[10.358411, 10.925131], [14.626623, 15.642721]],\n [[19.358412, 19.92513], [23.626623, 24.642721]],\n ]\n ),\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elastic_2d.py_TestRand2DElastic_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elastic_2d.py_TestRand2DElastic_", "embedding": null, "metadata": {"file_path": "tests/test_rand_elastic_2d.py", "file_name": "test_rand_elastic_2d.py", "file_type": "text/x-python", "category": "test", "start_line": 92, "end_line": 107, "span_ids": ["impl:3", "TestRand2DElastic.test_rand_2d_elastic", "TestRand2DElastic"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRand2DElastic(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_rand_2d_elastic(self, input_param, input_data, expected_val):\n g = Rand2DElastic(**input_param)\n g.set_random_state(123)\n result = g(**input_data)\n self.assertEqual(isinstance(result, torch.Tensor), isinstance(expected_val, torch.Tensor))\n if isinstance(result, torch.Tensor):\n np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4)\n else:\n np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n 
unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elastic_3d.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elastic_3d.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_rand_elastic_3d.py", "file_name": "test_rand_elastic_3d.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 68, "span_ids": ["docstring"], "tokens": 593}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import Rand3DElastic\n\nTEST_CASES = [\n [\n {\n \"magnitude_range\": (0.3, 2.3),\n \"sigma_range\": (1.0, 20.0),\n \"prob\": 0.0,\n \"as_tensor_output\": False,\n \"device\": None,\n \"spatial_size\": -1,\n },\n {\"img\": torch.arange(72).reshape((2, 3, 3, 4))},\n np.arange(72).reshape((2, 3, 3, 4)),\n ],\n [\n {\n \"magnitude_range\": (0.3, 2.3),\n \"sigma_range\": (1.0, 20.0),\n \"prob\": 0.0,\n \"as_tensor_output\": False,\n \"device\": None,\n },\n {\"img\": torch.ones((2, 3, 3, 3)), \"spatial_size\": (2, 2, 2)},\n np.ones((2, 2, 2, 2)),\n ],\n [\n {\n \"magnitude_range\": (0.3, 0.3),\n \"sigma_range\": (1.0, 2.0),\n \"prob\": 0.9,\n \"as_tensor_output\": False,\n \"device\": None,\n },\n {\"img\": torch.arange(27).reshape((1, 3, 3, 3)), \"spatial_size\": (2, 2, 2)},\n np.array([[[[6.4939356, 7.50289], [9.518351, 10.522849]], [[15.512375, 16.523542], [18.531467, 19.53646]]]]),\n ],\n [\n {\n \"magnitude_range\": (0.3, 0.3),\n \"sigma_range\": (1.0, 2.0),\n \"prob\": 0.9,\n \"rotate_range\": [1, 1, 1],\n \"as_tensor_output\": False,\n \"device\": \"cuda\" if torch.cuda.is_available() else \"cpu\",\n \"spatial_size\": (2, 2, 2),\n },\n {\"img\": torch.arange(27).reshape((1, 3, 3, 3)), \"mode\": \"bilinear\"},\n np.array([[[[5.0069294, 9.463932], [9.287769, 13.739735]], [[12.319424, 16.777205], [16.594296, 21.045748]]]]),\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elastic_3d.py_TestRand3DElastic_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elastic_3d.py_TestRand3DElastic_", "embedding": null, "metadata": {"file_path": "tests/test_rand_elastic_3d.py", "file_name": "test_rand_elastic_3d.py", "file_type": "text/x-python", "category": "test", "start_line": 71, "end_line": 86, "span_ids": ["impl:3", "TestRand3DElastic.test_rand_3d_elastic", "TestRand3DElastic"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", 
"end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRand3DElastic(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_rand_3d_elastic(self, input_param, input_data, expected_val):\n g = Rand3DElastic(**input_param)\n g.set_random_state(123)\n result = g(**input_data)\n self.assertEqual(isinstance(result, torch.Tensor), isinstance(expected_val, torch.Tensor))\n if isinstance(result, torch.Tensor):\n np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4)\n else:\n np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elasticd_2d.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elasticd_2d.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_rand_elasticd_2d.py", "file_name": "test_rand_elasticd_2d.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 135, "span_ids": ["docstring"], "tokens": 1291}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import Rand2DElasticd\n\nTEST_CASES = [\n [\n {\n \"keys\": (\"img\", \"seg\"),\n \"spacing\": (0.3, 0.3),\n \"magnitude_range\": (1.0, 2.0),\n \"prob\": 0.0,\n \"as_tensor_output\": False,\n \"device\": None,\n \"spatial_size\": (2, 2),\n },\n {\"img\": torch.ones((3, 3, 3)), \"seg\": torch.ones((3, 3, 3))},\n np.ones((3, 2, 2)),\n ],\n [\n {\n \"keys\": (\"img\", \"seg\"),\n \"spacing\": (0.3, 0.3),\n \"magnitude_range\": (0.3, 0.3),\n \"prob\": 0.0,\n \"as_tensor_output\": False,\n \"device\": None,\n \"spatial_size\": -1,\n },\n {\"img\": torch.arange(4).reshape((1, 2, 2)), \"seg\": torch.arange(4).reshape((1, 2, 2))},\n np.arange(4).reshape((1, 2, 2)),\n ],\n [\n {\n \"keys\": (\"img\", \"seg\"),\n \"spacing\": (0.3, 0.3),\n \"magnitude_range\": (1.0, 2.0),\n \"prob\": 0.9,\n \"as_tensor_output\": False,\n \"padding_mode\": \"zeros\",\n \"device\": None,\n \"spatial_size\": (2, 2),\n \"mode\": \"bilinear\",\n },\n {\"img\": torch.ones((3, 3, 3)), \"seg\": torch.ones((3, 3, 3))},\n np.array(\n [\n [[0.45531988, 0.0], [0.0, 0.71558857]],\n [[0.45531988, 0.0], [0.0, 0.71558857]],\n [[0.45531988, 0.0], [0.0, 0.71558857]],\n ]\n ),\n ],\n [\n {\n \"keys\": (\"img\", \"seg\"),\n \"spacing\": (1.0, 1.0),\n \"magnitude_range\": (1.0, 1.0),\n \"scale_range\": [1.2, 2.2],\n \"prob\": 0.9,\n \"padding_mode\": \"border\",\n \"as_tensor_output\": True,\n \"device\": None,\n \"spatial_size\": (2, 2),\n },\n {\"img\": torch.arange(27).reshape((3, 3, 3)), \"seg\": torch.arange(27).reshape((3, 3, 3))},\n torch.tensor(\n [\n 
[[3.0793, 2.6141], [4.0568, 5.9978]],\n [[12.0793, 11.6141], [13.0568, 14.9978]],\n [[21.0793, 20.6141], [22.0568, 23.9978]],\n ]\n ),\n ],\n [\n {\n \"keys\": (\"img\", \"seg\"),\n \"spacing\": (0.3, 0.3),\n \"magnitude_range\": (0.1, 0.2),\n \"translate_range\": [-0.01, 0.01],\n \"scale_range\": [0.01, 0.02],\n \"prob\": 0.9,\n \"as_tensor_output\": False,\n \"device\": None,\n \"spatial_size\": (2, 2),\n },\n {\"img\": torch.arange(27).reshape((3, 3, 3)), \"seg\": torch.arange(27).reshape((3, 3, 3))},\n np.array(\n [\n [[1.3584113, 1.9251312], [5.626623, 6.642721]],\n [[10.358411, 10.925131], [14.626623, 15.642721]],\n [[19.358412, 19.92513], [23.626623, 24.642721]],\n ]\n ),\n ],\n [\n {\n \"keys\": (\"img\", \"seg\"),\n \"mode\": (\"bilinear\", \"nearest\"),\n \"spacing\": (0.3, 0.3),\n \"magnitude_range\": (0.1, 0.2),\n \"translate_range\": [-0.01, 0.01],\n \"scale_range\": [0.01, 0.02],\n \"prob\": 0.9,\n \"as_tensor_output\": True,\n \"device\": None,\n \"spatial_size\": (2, 2),\n },\n {\"img\": torch.arange(27).reshape((3, 3, 3)), \"seg\": torch.arange(27).reshape((3, 3, 3))},\n {\n \"img\": torch.tensor(\n [\n [[1.3584, 1.9251], [5.6266, 6.6427]],\n [[10.3584, 10.9251], [14.6266, 15.6427]],\n [[19.3584, 19.9251], [23.6266, 24.6427]],\n ]\n ),\n \"seg\": torch.tensor([[[0.0, 2.0], [6.0, 8.0]], [[9.0, 11.0], [15.0, 17.0]], [[18.0, 20.0], [24.0, 26.0]]]),\n },\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elasticd_2d.py_TestRand2DElasticd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elasticd_2d.py_TestRand2DElasticd_", "embedding": null, "metadata": {"file_path": "tests/test_rand_elasticd_2d.py", "file_name": "test_rand_elasticd_2d.py", "file_type": "text/x-python", "category": "test", "start_line": 138, "end_line": 156, "span_ids": ["TestRand2DElasticd.test_rand_2d_elasticd", "impl:3", "TestRand2DElasticd"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRand2DElasticd(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_rand_2d_elasticd(self, input_param, input_data, expected_val):\n g = Rand2DElasticd(**input_param)\n g.set_random_state(123)\n res = g(input_data)\n for key in res:\n result = res[key]\n expected = expected_val[key] if isinstance(expected_val, dict) else expected_val\n self.assertEqual(isinstance(result, torch.Tensor), isinstance(expected, torch.Tensor))\n if isinstance(result, torch.Tensor):\n np.testing.assert_allclose(result.cpu().numpy(), expected.cpu().numpy(), rtol=1e-4, atol=1e-4)\n else:\n np.testing.assert_allclose(result, expected, rtol=1e-4, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elasticd_3d.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elasticd_3d.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_rand_elasticd_3d.py", "file_name": "test_rand_elasticd_3d.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 106, "span_ids": ["docstring"], "tokens": 1118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import Rand3DElasticd\n\nTEST_CASES = [\n [\n {\n \"keys\": (\"img\", \"seg\"),\n \"magnitude_range\": (0.3, 2.3),\n \"sigma_range\": (1.0, 20.0),\n \"prob\": 0.0,\n \"as_tensor_output\": False,\n \"device\": None,\n \"spatial_size\": (2, 2, 2),\n },\n {\"img\": torch.ones((2, 3, 3, 3)), \"seg\": torch.ones((2, 3, 3, 3))},\n np.ones((2, 2, 2, 2)),\n ],\n [\n {\n \"keys\": (\"img\", \"seg\"),\n \"magnitude_range\": (0.3, 2.3),\n \"sigma_range\": (1.0, 20.0),\n \"prob\": 0.0,\n \"as_tensor_output\": False,\n \"device\": None,\n \"spatial_size\": (2, -1, -1),\n },\n {\"img\": torch.ones((2, 3, 3, 3)), \"seg\": torch.ones((2, 3, 3, 3))},\n np.ones((2, 2, 3, 3)),\n ],\n [\n {\n \"keys\": (\"img\", \"seg\"),\n \"magnitude_range\": (0.3, 2.3),\n \"sigma_range\": (1.0, 20.0),\n \"prob\": 0.0,\n \"as_tensor_output\": False,\n \"device\": None,\n \"spatial_size\": -1,\n },\n {\"img\": torch.arange(8).reshape((1, 2, 2, 2)), \"seg\": torch.arange(8).reshape((1, 2, 2, 2))},\n np.arange(8).reshape((1, 2, 2, 2)),\n ],\n [\n {\n \"keys\": (\"img\", \"seg\"),\n \"magnitude_range\": (0.3, 0.3),\n \"sigma_range\": (1.0, 2.0),\n \"prob\": 0.9,\n \"as_tensor_output\": False,\n \"device\": None,\n \"spatial_size\": (2, 2, 2),\n },\n {\"img\": torch.arange(27).reshape((1, 3, 3, 3)), \"seg\": torch.arange(27).reshape((1, 3, 3, 3))},\n np.array([[[[6.4939356, 7.50289], [9.518351, 10.522849]], [[15.512375, 16.523542], [18.531467, 19.53646]]]]),\n ],\n [\n {\n \"keys\": (\"img\", \"seg\"),\n \"magnitude_range\": (0.3, 0.3),\n \"sigma_range\": (1.0, 2.0),\n \"prob\": 0.9,\n \"rotate_range\": [1, 1, 1],\n \"as_tensor_output\": False,\n \"device\": None,\n \"spatial_size\": (2, 2, 2),\n \"mode\": \"bilinear\",\n },\n {\"img\": torch.arange(27).reshape((1, 3, 3, 3)), \"seg\": torch.arange(27).reshape((1, 3, 3, 3))},\n np.array([[[[5.0069294, 9.463932], [9.287769, 13.739735]], [[12.319424, 16.777205], [16.594296, 21.045748]]]]),\n ],\n [\n {\n \"keys\": (\"img\", \"seg\"),\n \"mode\": (\"bilinear\", \"nearest\"),\n \"magnitude_range\": (0.3, 0.3),\n \"sigma_range\": (1.0, 2.0),\n \"prob\": 0.9,\n \"rotate_range\": [1, 1, 1],\n \"as_tensor_output\": True,\n \"device\": torch.device(\"cpu:0\"),\n \"spatial_size\": (2, 2, 2),\n },\n {\"img\": torch.arange(27).reshape((1, 3, 3, 3)), \"seg\": torch.arange(27).reshape((1, 3, 3, 3))},\n {\n \"img\": torch.tensor([[[[5.0069, 9.4639], [9.2878, 13.7397]], [[12.3194, 16.7772], [16.5943, 21.0457]]]]),\n \"seg\": torch.tensor([[[[4.0, 14.0], [7.0, 14.0]], [[9.0, 19.0], [12.0, 22.0]]]]),\n 
},\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elasticd_3d.py_TestRand3DElasticd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_elasticd_3d.py_TestRand3DElasticd_", "embedding": null, "metadata": {"file_path": "tests/test_rand_elasticd_3d.py", "file_name": "test_rand_elasticd_3d.py", "file_type": "text/x-python", "category": "test", "start_line": 109, "end_line": 127, "span_ids": ["impl:3", "TestRand3DElasticd.test_rand_3d_elasticd", "TestRand3DElasticd"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRand3DElasticd(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_rand_3d_elasticd(self, input_param, input_data, expected_val):\n g = Rand3DElasticd(**input_param)\n g.set_random_state(123)\n res = g(input_data)\n for key in res:\n result = res[key]\n expected = expected_val[key] if isinstance(expected_val, dict) else expected_val\n self.assertEqual(isinstance(result, torch.Tensor), isinstance(expected, torch.Tensor))\n if isinstance(result, torch.Tensor):\n np.testing.assert_allclose(result.cpu().numpy(), expected.cpu().numpy(), rtol=1e-4, atol=1e-4)\n else:\n np.testing.assert_allclose(result, expected, rtol=1e-4, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_flip.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_flip.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rand_flip.py", "file_name": "test_rand_flip.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 44, "span_ids": ["TestRandFlip.test_correct_results", "TestRandFlip", "TestRandFlip.test_invalid_inputs", "impl:5", "docstring"], "tokens": 252}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandFlip\nfrom tests.utils import NumpyImageTestCase2D\n\nINVALID_CASES = [(\"wrong_axis\", [\"s\", 1], TypeError), (\"not_numbers\", \"s\", TypeError)]\n\nVALID_CASES = [(\"no_axis\", None), (\"one_axis\", 1), (\"many_axis\", [0, 1])]\n\n\nclass TestRandFlip(NumpyImageTestCase2D):\n @parameterized.expand(INVALID_CASES)\n def test_invalid_inputs(self, _, spatial_axis, raises):\n with 
self.assertRaises(raises):\n flip = RandFlip(prob=1.0, spatial_axis=spatial_axis)\n flip(self.imt[0])\n\n @parameterized.expand(VALID_CASES)\n def test_correct_results(self, _, spatial_axis):\n flip = RandFlip(prob=1.0, spatial_axis=spatial_axis)\n expected = []\n for channel in self.imt[0]:\n expected.append(np.flip(channel, spatial_axis))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(expected, flip(self.imt[0])))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_flipd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_flipd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rand_flipd.py", "file_name": "test_rand_flipd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 37, "span_ids": ["TestRandFlipd.test_correct_results", "impl:3", "TestRandFlipd", "docstring"], "tokens": 188}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandFlipd\nfrom tests.utils import NumpyImageTestCase2D\n\nVALID_CASES = [(\"no_axis\", None), (\"one_axis\", 1), (\"many_axis\", [0, 1])]\n\n\nclass TestRandFlipd(NumpyImageTestCase2D):\n @parameterized.expand(VALID_CASES)\n def test_correct_results(self, _, spatial_axis):\n flip = RandFlipd(keys=\"img\", prob=1.0, spatial_axis=spatial_axis)\n res = flip({\"img\": self.imt[0]})\n expected = []\n for channel in self.imt[0]:\n expected.append(np.flip(channel, spatial_axis))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(expected, res[\"img\"]))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate.py_TestRandRotate3D_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate.py_TestRandRotate3D_", "embedding": null, "metadata": {"file_path": "tests/test_rand_rotate.py", "file_name": "test_rand_rotate.py", "file_type": "text/x-python", "category": "test", "start_line": 59, "end_line": 104, "span_ids": ["TestRandRotate3D", "TestRandRotate3D.test_correct_results", "impl"], "tokens": 372}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandRotate3D(NumpyImageTestCase3D):\n @parameterized.expand(\n [\n (np.pi / 2, 
-np.pi / 6, (0.0, np.pi), False, \"bilinear\", \"border\", False, (1, 87, 104, 109)),\n (\n np.pi / 4,\n (-np.pi / 9, np.pi / 4.5),\n (np.pi / 9, np.pi / 6),\n False,\n \"nearest\",\n \"border\",\n True,\n (1, 89, 105, 104),\n ),\n (\n 0.0,\n (2 * np.pi, 2.06 * np.pi),\n (-np.pi / 180, np.pi / 180),\n True,\n \"nearest\",\n \"zeros\",\n True,\n (1, 48, 64, 80),\n ),\n ((-np.pi / 4, 0), 0, 0, False, \"nearest\", \"zeros\", False, (1, 48, 77, 90)),\n ]\n )\n def test_correct_results(self, x, y, z, keep_size, mode, padding_mode, align_corners, expected):\n rotate_fn = RandRotate(\n range_x=x,\n range_y=y,\n range_z=z,\n prob=1.0,\n keep_size=keep_size,\n mode=mode,\n padding_mode=padding_mode,\n align_corners=align_corners,\n )\n rotate_fn.set_random_state(243)\n rotated = rotate_fn(self.imt[0])\n np.testing.assert_allclose(rotated.shape, expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate90.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate90.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rand_rotate90.py", "file_name": "test_rand_rotate90.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 64, "span_ids": ["TestRandRotate90", "impl", "TestRandRotate90.test_spatial_axes", "TestRandRotate90.test_prob_k_spatial_axes", "TestRandRotate90.test_k", "docstring", "TestRandRotate90.test_default"], "tokens": 408}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import RandRotate90\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestRandRotate90(NumpyImageTestCase2D):\n def test_default(self):\n rotate = RandRotate90()\n rotate.set_random_state(123)\n rotated = rotate(self.imt[0])\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 0, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated, expected))\n\n def test_k(self):\n rotate = RandRotate90(max_k=2)\n rotate.set_random_state(234)\n rotated = rotate(self.imt[0])\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 0, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated, expected))\n\n def test_spatial_axes(self):\n rotate = RandRotate90(spatial_axes=(0, 1))\n rotate.set_random_state(234)\n rotated = rotate(self.imt[0])\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 0, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated, expected))\n\n def test_prob_k_spatial_axes(self):\n rotate = RandRotate90(prob=1.0, max_k=2, spatial_axes=(0, 1))\n rotate.set_random_state(234)\n rotated = rotate(self.imt[0])\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 1, (0, 1)))\n expected = np.stack(expected)\n 
self.assertTrue(np.allclose(rotated, expected))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate90d.py_unittest_TestRandRotate90d.test_spatial_axes.self_assertTrue_np_allclo": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate90d.py_unittest_TestRandRotate90d.test_spatial_axes.self_assertTrue_np_allclo", "embedding": null, "metadata": {"file_path": "tests/test_rand_rotate90d.py", "file_name": "test_rand_rotate90d.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 52, "span_ids": ["TestRandRotate90d.test_spatial_axes", "TestRandRotate90d", "TestRandRotate90d.test_default", "docstring", "TestRandRotate90d.test_k"], "tokens": 335}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import RandRotate90d\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestRandRotate90d(NumpyImageTestCase2D):\n def test_default(self):\n key = None\n rotate = RandRotate90d(keys=key)\n rotate.set_random_state(123)\n rotated = rotate({key: self.imt[0]})\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 0, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated[key], expected))\n\n def test_k(self):\n key = \"test\"\n rotate = RandRotate90d(keys=key, max_k=2)\n rotate.set_random_state(234)\n rotated = rotate({key: self.imt[0]})\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 0, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated[key], expected))\n\n def test_spatial_axes(self):\n key = \"test\"\n rotate = RandRotate90d(keys=key, spatial_axes=(0, 1))\n rotate.set_random_state(234)\n rotated = rotate({key: self.imt[0]})\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 0, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated[key], expected))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate90d.py_TestRandRotate90d.test_prob_k_spatial_axes_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotate90d.py_TestRandRotate90d.test_prob_k_spatial_axes_", "embedding": null, "metadata": {"file_path": "tests/test_rand_rotate90d.py", "file_name": "test_rand_rotate90d.py", "file_type": "text/x-python", "category": "test", "start_line": 54, "end_line": 74, "span_ids": ["TestRandRotate90d.test_prob_k_spatial_axes", "TestRandRotate90d.test_no_key", "impl"], "tokens": 207}, "excluded_embed_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandRotate90d(NumpyImageTestCase2D):\n\n def test_prob_k_spatial_axes(self):\n key = \"test\"\n rotate = RandRotate90d(keys=key, prob=1.0, max_k=2, spatial_axes=(0, 1))\n rotate.set_random_state(234)\n rotated = rotate({key: self.imt[0]})\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 1, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated[key], expected))\n\n def test_no_key(self):\n key = \"unknown\"\n rotate = RandRotate90d(keys=key, prob=1.0, max_k=2, spatial_axes=(0, 1))\n with self.assertRaisesRegex(KeyError, \"\"):\n rotated = rotate({\"test\": self.imt[0]})\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotated.py_TestRandRotated3D_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_rotated.py_TestRandRotated3D_", "embedding": null, "metadata": {"file_path": "tests/test_rand_rotated.py", "file_name": "test_rand_rotated.py", "file_type": "text/x-python", "category": "test", "start_line": 61, "end_line": 138, "span_ids": ["TestRandRotated3D", "TestRandRotated3D.test_correct_shapes", "impl"], "tokens": 656}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandRotated3D(NumpyImageTestCase3D):\n @parameterized.expand(\n [\n (np.pi / 2, -np.pi / 6, (0.0, np.pi), False, \"bilinear\", \"border\", False, (1, 87, 104, 109)),\n (\n np.pi / 2,\n -np.pi / 6,\n (0.0, np.pi),\n False,\n GridSampleMode.NEAREST,\n GridSamplePadMode.BORDER,\n False,\n (1, 87, 104, 109),\n ),\n (\n np.pi / 4,\n (-np.pi / 9, np.pi / 4.5),\n (np.pi / 9, np.pi / 6),\n False,\n \"nearest\",\n \"border\",\n True,\n (1, 89, 105, 104),\n ),\n (\n np.pi / 4,\n (-np.pi / 9, np.pi / 4.5),\n (np.pi / 9, np.pi / 6),\n False,\n GridSampleMode.NEAREST,\n GridSamplePadMode.BORDER,\n True,\n (1, 89, 105, 104),\n ),\n (\n 0.0,\n (2 * np.pi, 2.06 * np.pi),\n (-np.pi / 180, np.pi / 180),\n True,\n \"nearest\",\n \"zeros\",\n True,\n (1, 48, 64, 80),\n ),\n (\n 0.0,\n (2 * np.pi, 2.06 * np.pi),\n (-np.pi / 180, np.pi / 180),\n True,\n GridSampleMode.NEAREST,\n GridSamplePadMode.ZEROS,\n True,\n (1, 48, 64, 80),\n ),\n ((-np.pi / 4, 0), 0, 0, False, \"nearest\", \"zeros\", False, (1, 48, 77, 90)),\n ((-np.pi / 4, 0), 0, 0, False, GridSampleMode.NEAREST, GridSamplePadMode.ZEROS, False, (1, 48, 77, 90)),\n ]\n )\n def test_correct_shapes(self, x, y, z, keep_size, mode, padding_mode, align_corners, expected):\n rotate_fn = RandRotated(\n \"img\",\n range_x=x,\n range_y=y,\n range_z=z,\n prob=1.0,\n keep_size=keep_size,\n mode=mode,\n padding_mode=padding_mode,\n align_corners=align_corners,\n )\n 
rotate_fn.set_random_state(243)\n rotated = rotate_fn({\"img\": self.imt[0], \"seg\": self.segn[0]})\n np.testing.assert_allclose(rotated[\"img\"].shape, expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_scale_intensity.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_scale_intensity.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rand_scale_intensity.py", "file_name": "test_rand_scale_intensity.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 32, "span_ids": ["TestRandScaleIntensity", "TestRandScaleIntensity.test_value", "impl", "docstring"], "tokens": 142}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import RandScaleIntensity\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestRandScaleIntensity(NumpyImageTestCase2D):\n def test_value(self):\n scaler = RandScaleIntensity(factors=0.5, prob=1.0)\n scaler.set_random_state(seed=0)\n result = scaler(self.imt)\n np.random.seed(0)\n expected = (self.imt * (1 + np.random.uniform(low=-0.5, high=0.5))).astype(np.float32)\n np.testing.assert_allclose(result, expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_scale_intensityd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_scale_intensityd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rand_scale_intensityd.py", "file_name": "test_rand_scale_intensityd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 33, "span_ids": ["TestRandScaleIntensityd", "TestRandScaleIntensityd.test_value", "impl", "docstring"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import RandScaleIntensityd\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestRandScaleIntensityd(NumpyImageTestCase2D):\n def test_value(self):\n key = \"img\"\n scaler = RandScaleIntensityd(keys=[key], factors=0.5, prob=1.0)\n scaler.set_random_state(seed=0)\n result = scaler({key: self.imt})\n np.random.seed(0)\n expected = (self.imt * (1 + np.random.uniform(low=-0.5, 
high=0.5))).astype(np.float32)\n np.testing.assert_allclose(result[key], expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_shift_intensity.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_shift_intensity.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rand_shift_intensity.py", "file_name": "test_rand_shift_intensity.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 32, "span_ids": ["impl", "TestRandShiftIntensity", "TestRandShiftIntensity.test_value", "docstring"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import RandShiftIntensity\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestRandShiftIntensity(NumpyImageTestCase2D):\n def test_value(self):\n shifter = RandShiftIntensity(offsets=1.0, prob=1.0)\n shifter.set_random_state(seed=0)\n result = shifter(self.imt)\n np.random.seed(0)\n expected = self.imt + np.random.uniform(low=-1.0, high=1.0)\n np.testing.assert_allclose(result, expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_shift_intensityd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_shift_intensityd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rand_shift_intensityd.py", "file_name": "test_rand_shift_intensityd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 33, "span_ids": ["TestRandShiftIntensityd", "TestRandShiftIntensityd.test_value", "impl", "docstring"], "tokens": 152}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import RandShiftIntensityd\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestRandShiftIntensityd(NumpyImageTestCase2D):\n def test_value(self):\n key = \"img\"\n shifter = RandShiftIntensityd(keys=[key], offsets=1.0, prob=1.0)\n shifter.set_random_state(seed=0)\n result = shifter({key: self.imt})\n np.random.seed(0)\n expected = self.imt + np.random.uniform(low=-1.0, high=1.0)\n np.testing.assert_allclose(result[key], expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", 
"start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop.py_unittest_TEST_CASE_3._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop.py_unittest_TEST_CASE_3._", "embedding": null, "metadata": {"file_path": "tests/test_rand_spatial_crop.py", "file_name": "test_rand_spatial_crop.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 36, "span_ids": ["docstring"], "tokens": 312}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandSpatialCrop\n\nTEST_CASE_0 = [\n {\"roi_size\": [3, 3, -1], \"random_center\": True},\n np.random.randint(0, 2, size=[3, 3, 3, 4]),\n (3, 3, 3, 4),\n]\n\nTEST_CASE_1 = [{\"roi_size\": [3, 3, 3], \"random_center\": True}, np.random.randint(0, 2, size=[3, 3, 3, 3]), (3, 3, 3, 3)]\n\nTEST_CASE_2 = [\n {\"roi_size\": [3, 3, 3], \"random_center\": False},\n np.random.randint(0, 2, size=[3, 3, 3, 3]),\n (3, 3, 3, 3),\n]\n\nTEST_CASE_3 = [\n {\"roi_size\": [3, 3], \"random_center\": False},\n np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 1, 2, 1, 0], [0, 0, 0, 0, 0]]]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop.py_TestRandSpatialCrop_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop.py_TestRandSpatialCrop_", "embedding": null, "metadata": {"file_path": "tests/test_rand_spatial_crop.py", "file_name": "test_rand_spatial_crop.py", "file_type": "text/x-python", "category": "test", "start_line": 37, "end_line": 53, "span_ids": ["impl:9", "TestRandSpatialCrop.test_value", "TestRandSpatialCrop", "TestRandSpatialCrop.test_shape"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandSpatialCrop(unittest.TestCase):\n @parameterized.expand([TEST_CASE_0, TEST_CASE_1, TEST_CASE_2])\n def test_shape(self, input_param, input_data, expected_shape):\n result = RandSpatialCrop(**input_param)(input_data)\n self.assertTupleEqual(result.shape, expected_shape)\n\n @parameterized.expand([TEST_CASE_3])\n def test_value(self, input_param, input_data):\n cropper = RandSpatialCrop(**input_param)\n result = cropper(input_data)\n roi = [(2 - i // 2, 2 + i - i // 2) for i in cropper._size]\n np.testing.assert_allclose(result, 
input_data[:, roi[0][0] : roi[0][1], roi[1][0] : roi[1][1]])\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_cropd.py_unittest_TEST_CASE_3._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_cropd.py_unittest_TEST_CASE_3._", "embedding": null, "metadata": {"file_path": "tests/test_rand_spatial_cropd.py", "file_name": "test_rand_spatial_cropd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 40, "span_ids": ["docstring"], "tokens": 358}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandSpatialCropd\n\nTEST_CASE_0 = [\n {\"keys\": \"img\", \"roi_size\": [3, 3, -1], \"random_center\": True},\n {\"img\": np.random.randint(0, 2, size=[3, 3, 3, 5])},\n (3, 3, 3, 5),\n]\n\nTEST_CASE_1 = [\n {\"keys\": \"img\", \"roi_size\": [3, 3, 3], \"random_center\": True},\n {\"img\": np.random.randint(0, 2, size=[3, 3, 3, 3])},\n (3, 3, 3, 3),\n]\n\nTEST_CASE_2 = [\n {\"keys\": \"img\", \"roi_size\": [3, 3, 3], \"random_center\": False},\n {\"img\": np.random.randint(0, 2, size=[3, 3, 3, 3])},\n (3, 3, 3, 3),\n]\n\nTEST_CASE_3 = [\n {\"keys\": \"img\", \"roi_size\": [3, 3], \"random_center\": False},\n {\"img\": np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 1, 2, 1, 0], [0, 0, 0, 0, 0]]])},\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_cropd.py_TestRandSpatialCropd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_cropd.py_TestRandSpatialCropd_", "embedding": null, "metadata": {"file_path": "tests/test_rand_spatial_cropd.py", "file_name": "test_rand_spatial_cropd.py", "file_type": "text/x-python", "category": "test", "start_line": 41, "end_line": 57, "span_ids": ["TestRandSpatialCropd", "impl:9", "TestRandSpatialCropd.test_shape", "TestRandSpatialCropd.test_value"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandSpatialCropd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_0, TEST_CASE_1, TEST_CASE_2])\n def test_shape(self, input_param, input_data, expected_shape):\n result = RandSpatialCropd(**input_param)(input_data)\n self.assertTupleEqual(result[\"img\"].shape, 
expected_shape)\n\n @parameterized.expand([TEST_CASE_3])\n def test_value(self, input_param, input_data):\n cropper = RandSpatialCropd(**input_param)\n result = cropper(input_data)\n roi = [(2 - i // 2, 2 + i - i // 2) for i in cropper._size]\n np.testing.assert_allclose(result[\"img\"], input_data[\"img\"][:, roi[0][0] : roi[0][1], roi[1][0] : roi[1][1]])\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoom.py_unittest_TestRandZoom.test_correct_results.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoom.py_unittest_TestRandZoom.test_correct_results.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_rand_zoom.py", "file_name": "test_rand_zoom.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 41, "span_ids": ["TestRandZoom", "TestRandZoom.test_correct_results", "docstring"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\nfrom scipy.ndimage import zoom as zoom_scipy\n\nfrom monai.transforms import RandZoom\nfrom monai.utils import GridSampleMode, InterpolateMode\nfrom tests.utils import NumpyImageTestCase2D\n\nVALID_CASES = [(0.8, 1.2, \"nearest\", False), (0.8, 1.2, InterpolateMode.NEAREST, False)]\n\n\nclass TestRandZoom(NumpyImageTestCase2D):\n @parameterized.expand(VALID_CASES)\n def test_correct_results(self, min_zoom, max_zoom, mode, keep_size):\n random_zoom = RandZoom(\n prob=1.0,\n min_zoom=min_zoom,\n max_zoom=max_zoom,\n mode=mode,\n keep_size=keep_size,\n )\n random_zoom.set_random_state(1234)\n zoomed = random_zoom(self.imt[0])\n expected = []\n for channel in self.imt[0]:\n expected.append(zoom_scipy(channel, zoom=random_zoom._zoom, mode=\"nearest\", order=0, prefilter=False))\n expected = np.stack(expected).astype(np.float32)\n np.testing.assert_allclose(zoomed, expected, atol=1.0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoom.py_TestRandZoom.test_keep_size_TestRandZoom.test_keep_size.None_2": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoom.py_TestRandZoom.test_keep_size_TestRandZoom.test_keep_size.None_2", "embedding": null, "metadata": {"file_path": "tests/test_rand_zoom.py", "file_name": "test_rand_zoom.py", "file_type": "text/x-python", "category": "test", "start_line": 37, "end_line": 44, "span_ids": ["TestRandZoom.test_keep_size"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandZoom(NumpyImageTestCase2D):\n\n def test_keep_size(self):\n random_zoom = RandZoom(prob=1.0, min_zoom=0.6, max_zoom=0.7, keep_size=True)\n zoomed = random_zoom(self.imt[0])\n self.assertTrue(np.array_equal(zoomed.shape, self.imt.shape[1:]))\n zoomed = random_zoom(self.imt[0])\n self.assertTrue(np.array_equal(zoomed.shape, self.imt.shape[1:]))\n zoomed = random_zoom(self.imt[0])\n self.assertTrue(np.array_equal(zoomed.shape, self.imt.shape[1:]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoomd.py_unittest_TestRandZoomd.test_correct_results.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_zoomd.py_unittest_TestRandZoomd.test_correct_results.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_rand_zoomd.py", "file_name": "test_rand_zoomd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 44, "span_ids": ["TestRandZoomd", "TestRandZoomd.test_correct_results", "docstring"], "tokens": 264}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\nfrom scipy.ndimage import zoom as zoom_scipy\n\nfrom monai.transforms import RandZoomd\nfrom tests.utils import NumpyImageTestCase2D\n\nVALID_CASES = [(0.8, 1.2, \"nearest\", None, False)]\n\n\nclass TestRandZoomd(NumpyImageTestCase2D):\n @parameterized.expand(VALID_CASES)\n def test_correct_results(self, min_zoom, max_zoom, mode, align_corners, keep_size):\n key = \"img\"\n random_zoom = RandZoomd(\n key,\n prob=1.0,\n min_zoom=min_zoom,\n max_zoom=max_zoom,\n mode=mode,\n align_corners=align_corners,\n keep_size=keep_size,\n )\n random_zoom.set_random_state(1234)\n\n zoomed = random_zoom({key: self.imt[0]})\n expected = []\n for channel in self.imt[0]:\n expected.append(zoom_scipy(channel, zoom=random_zoom._zoom, mode=\"nearest\", order=0, prefilter=False))\n expected = np.stack(expected).astype(np.float32)\n np.testing.assert_allclose(expected, zoomed[key], atol=1.0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_randomizable.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_randomizable.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_randomizable.py", "file_name": "test_randomizable.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 
49, "span_ids": ["TestRandomizable.test_state", "impl", "TestRandomizable.test_default", "RandTest.randomize", "docstring", "RandTest", "TestRandomizable", "TestRandomizable.test_seed"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms.transform import Randomizable\n\n\nclass RandTest(Randomizable):\n def randomize(self, data=None):\n pass\n\n\nclass TestRandomizable(unittest.TestCase):\n def test_default(self):\n inst = RandTest()\n r1 = inst.R.rand()\n self.assertTrue(isinstance(inst.R, np.random.RandomState))\n inst.set_random_state()\n r2 = inst.R.rand()\n self.assertNotAlmostEqual(r1, r2)\n\n def test_seed(self):\n inst = RandTest()\n inst.set_random_state(seed=123)\n self.assertAlmostEqual(inst.R.rand(), 0.69646918)\n inst.set_random_state(123)\n self.assertAlmostEqual(inst.R.rand(), 0.69646918)\n\n def test_state(self):\n inst = RandTest()\n inst_r = np.random.RandomState(123)\n inst.set_random_state(state=inst_r)\n self.assertAlmostEqual(inst.R.rand(), 0.69646918)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_repeat_channel.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_repeat_channel.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_repeat_channel.py", "file_name": "test_repeat_channel.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 31, "span_ids": ["TestRepeatChannel", "impl:3", "TestRepeatChannel.test_shape", "docstring"], "tokens": 127}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RepeatChannel\n\nTEST_CASE_1 = [{\"repeats\": 3}, np.array([[[0, 1], [1, 2]]]), (3, 2, 2)]\n\n\nclass TestRepeatChannel(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1])\n def test_shape(self, input_param, input_data, expected_shape):\n result = RepeatChannel(**input_param)(input_data)\n self.assertEqual(result.shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_repeat_channeld.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_repeat_channeld.py_unittest_", "embedding": null, 
"metadata": {"file_path": "tests/test_repeat_channeld.py", "file_name": "test_repeat_channeld.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 35, "span_ids": ["TestRepeatChanneld.test_shape", "impl:3", "TestRepeatChanneld", "docstring"], "tokens": 169}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RepeatChanneld\n\nTEST_CASE_1 = [\n {\"keys\": [\"img\"], \"repeats\": 3},\n {\"img\": np.array([[[0, 1], [1, 2]]]), \"seg\": np.array([[[0, 1], [1, 2]]])},\n (3, 2, 2),\n]\n\n\nclass TestRepeatChanneld(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1])\n def test_shape(self, input_param, input_data, expected_shape):\n result = RepeatChanneld(**input_param)(input_data)\n self.assertEqual(result[\"img\"].shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resampler.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resampler.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_resampler.py", "file_name": "test_resampler.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 70, "span_ids": ["docstring"], "tokens": 1324}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import Resample\nfrom monai.transforms.utils import create_grid\n\nTEST_CASES = [\n [\n dict(padding_mode=\"zeros\", as_tensor_output=False, device=None),\n {\"grid\": create_grid((2, 2)), \"img\": np.arange(4).reshape((1, 2, 2))},\n np.array([[[0.0, 1.0], [2.0, 3.0]]]),\n ],\n [\n dict(padding_mode=\"zeros\", as_tensor_output=False, device=None),\n {\"grid\": create_grid((4, 4)), \"img\": np.arange(4).reshape((1, 2, 2))},\n np.array([[[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 2.0, 3.0, 0.0], [0.0, 0.0, 0.0, 0.0]]]),\n ],\n [\n dict(padding_mode=\"border\", as_tensor_output=False, device=None),\n {\"grid\": create_grid((4, 4)), \"img\": np.arange(4).reshape((1, 2, 2))},\n np.array([[[0.0, 0.0, 1.0, 1.0], [0.0, 0.0, 1.0, 1.0], [2.0, 2.0, 3, 3.0], [2.0, 2.0, 3.0, 3.0]]]),\n ],\n [\n dict(padding_mode=\"reflection\", as_tensor_output=False, device=None),\n {\"grid\": create_grid((4, 4)), \"img\": np.arange(4).reshape((1, 2, 2)), \"mode\": \"nearest\"},\n np.array([[[3.0, 2.0, 3.0, 2.0], [1.0, 0.0, 1.0, 0.0], [3.0, 2.0, 3.0, 2.0], [1.0, 0.0, 1.0, 0.0]]]),\n ],\n [\n dict(padding_mode=\"zeros\", as_tensor_output=False, 
device=None),\n {\"grid\": create_grid((4, 4, 4)), \"img\": np.arange(8).reshape((1, 2, 2, 2)), \"mode\": \"bilinear\"},\n np.array(\n [\n [\n [[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]],\n [[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 2.0, 3.0, 0.0], [0.0, 0.0, 0.0, 0.0]],\n [[0.0, 0.0, 0.0, 0.0], [0.0, 4.0, 5.0, 0.0], [0.0, 6.0, 7.0, 0.0], [0.0, 0.0, 0.0, 0.0]],\n [[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]],\n ]\n ]\n ),\n ],\n [\n dict(padding_mode=\"border\", as_tensor_output=False, device=None),\n {\"grid\": create_grid((4, 4, 4)), \"img\": np.arange(8).reshape((1, 2, 2, 2)), \"mode\": \"bilinear\"},\n np.array(\n [\n [\n [[0.0, 0.0, 1.0, 1.0], [0.0, 0.0, 1.0, 1.0], [2.0, 2.0, 3.0, 3.0], [2.0, 2.0, 3.0, 3.0]],\n [[0.0, 0.0, 1.0, 1.0], [0.0, 0.0, 1.0, 1.0], [2.0, 2.0, 3.0, 3.0], [2.0, 2.0, 3.0, 3.0]],\n [[4.0, 4.0, 5.0, 5.0], [4.0, 4.0, 5.0, 5.0], [6.0, 6.0, 7.0, 7.0], [6.0, 6.0, 7.0, 7.0]],\n [[4.0, 4.0, 5.0, 5.0], [4.0, 4.0, 5.0, 5.0], [6.0, 6.0, 7.0, 7.0], [6.0, 6.0, 7.0, 7.0]],\n ]\n ]\n ),\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resampler.py_TestResample_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resampler.py_TestResample_", "embedding": null, "metadata": {"file_path": "tests/test_resampler.py", "file_name": "test_resampler.py", "file_type": "text/x-python", "category": "test", "start_line": 73, "end_line": 87, "span_ids": ["TestResample.test_resample", "impl:3", "TestResample"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestResample(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_resample(self, input_param, input_data, expected_val):\n g = Resample(**input_param)\n result = g(**input_data)\n self.assertEqual(isinstance(result, torch.Tensor), isinstance(expected_val, torch.Tensor))\n if isinstance(result, torch.Tensor):\n np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4)\n else:\n np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resize.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resize.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_resize.py", "file_name": "test_resize.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 56, "span_ids": ["TestResize", "impl", "TestResize.test_invalid_inputs", "TestResize.test_correct_results", 
"docstring"], "tokens": 332}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport skimage.transform\nfrom parameterized import parameterized\n\nfrom monai.transforms import Resize\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestResize(NumpyImageTestCase2D):\n def test_invalid_inputs(self):\n with self.assertRaises(ValueError):\n resize = Resize(spatial_size=(128, 128, 3), mode=\"order\")\n resize(self.imt[0])\n\n with self.assertRaises(ValueError):\n resize = Resize(spatial_size=(128,), mode=\"order\")\n resize(self.imt[0])\n\n @parameterized.expand(\n [((32, -1), \"area\"), ((32, 32), \"area\"), ((32, 32, 32), \"trilinear\"), ((256, 256), \"bilinear\")]\n )\n def test_correct_results(self, spatial_size, mode):\n resize = Resize(spatial_size, mode=mode)\n _order = 0\n if mode.endswith(\"linear\"):\n _order = 1\n if spatial_size == (32, -1):\n spatial_size = (32, 64)\n expected = []\n for channel in self.imt[0]:\n expected.append(\n skimage.transform.resize(\n channel, spatial_size, order=_order, clip=False, preserve_range=False, anti_aliasing=False\n )\n )\n expected = np.stack(expected).astype(np.float32)\n out = resize(self.imt[0])\n np.testing.assert_allclose(out, expected, atol=0.9)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resized.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resized.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_resized.py", "file_name": "test_resized.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 54, "span_ids": ["TestResized", "TestResized.test_correct_results", "impl", "TestResized.test_invalid_inputs", "docstring"], "tokens": 354}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport skimage.transform\nfrom parameterized import parameterized\n\nfrom monai.transforms import Resized\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestResized(NumpyImageTestCase2D):\n def test_invalid_inputs(self):\n with self.assertRaises(ValueError):\n resize = Resized(keys=\"img\", spatial_size=(128, 128, 3), mode=\"order\")\n resize({\"img\": self.imt[0]})\n\n with self.assertRaises(ValueError):\n resize = Resized(keys=\"img\", spatial_size=(128,), mode=\"order\")\n resize({\"img\": self.imt[0]})\n\n @parameterized.expand([((32, -1), \"area\"), ((64, 64), \"area\"), ((32, 32, 32), \"area\"), ((256, 256), \"bilinear\")])\n def test_correct_results(self, spatial_size, mode):\n resize = Resized(\"img\", spatial_size, mode)\n _order = 0\n if 
mode.endswith(\"linear\"):\n _order = 1\n if spatial_size == (32, -1):\n spatial_size = (32, 64)\n expected = []\n for channel in self.imt[0]:\n expected.append(\n skimage.transform.resize(\n channel, spatial_size, order=_order, clip=False, preserve_range=False, anti_aliasing=False\n )\n )\n expected = np.stack(expected).astype(np.float32)\n out = resize({\"img\": self.imt[0]})[\"img\"]\n np.testing.assert_allclose(out, expected, atol=0.9)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate.py_unittest_TEST_CASES_SHAPE_3D._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate.py_unittest_TEST_CASES_SHAPE_3D._", "embedding": null, "metadata": {"file_path": "tests/test_rotate.py", "file_name": "test_rotate.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 41, "span_ids": ["docstring"], "tokens": 331}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport scipy.ndimage\nfrom parameterized import parameterized\n\nfrom monai.transforms import Rotate\nfrom tests.utils import NumpyImageTestCase2D, NumpyImageTestCase3D\n\nTEST_CASES_2D = [\n (np.pi / 6, False, \"bilinear\", \"border\", False),\n (np.pi / 4, True, \"bilinear\", \"border\", False),\n (-np.pi / 4.5, True, \"nearest\", \"reflection\", False),\n (np.pi, False, \"nearest\", \"zeros\", False),\n (-np.pi / 2, False, \"bilinear\", \"zeros\", True),\n]\n\nTEST_CASES_3D = [\n (-np.pi / 2, True, \"nearest\", \"border\", False),\n (np.pi / 4, True, \"bilinear\", \"border\", False),\n (-np.pi / 4.5, True, \"nearest\", \"reflection\", False),\n (np.pi, False, \"nearest\", \"zeros\", False),\n (-np.pi / 2, False, \"bilinear\", \"zeros\", False),\n]\n\nTEST_CASES_SHAPE_3D = [\n ([-np.pi / 2, 1.0, 2.0], \"nearest\", \"border\", False),\n ([np.pi / 4, 0, 0], \"bilinear\", \"border\", False),\n ([-np.pi / 4.5, -20, 20], \"nearest\", \"reflection\", False),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate.py_TestRotate3D.test_correct_shape_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate.py_TestRotate3D.test_correct_shape_", "embedding": null, "metadata": {"file_path": "tests/test_rotate.py", "file_name": "test_rotate.py", "file_type": "text/x-python", "category": "test", "start_line": 91, "end_line": 109, "span_ids": ["TestRotate3D.test_correct_shape", "impl:7", "TestRotate3D.test_ill_case"], "tokens": 180}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", 
"end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRotate3D(NumpyImageTestCase3D):\n\n @parameterized.expand(TEST_CASES_SHAPE_3D)\n def test_correct_shape(self, angle, mode, padding_mode, align_corners):\n rotate_fn = Rotate(angle, True, align_corners=align_corners)\n rotated = rotate_fn(self.imt[0], mode=mode, padding_mode=padding_mode)\n np.testing.assert_allclose(self.imt[0].shape, rotated.shape)\n\n def test_ill_case(self):\n rotate_fn = Rotate(10, True)\n with self.assertRaises(ValueError): # wrong shape\n rotate_fn(self.imt)\n\n rotate_fn = Rotate(10, keep_size=False)\n with self.assertRaises(ValueError): # wrong mode\n rotate_fn(self.imt[0], mode=\"trilinear\")\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate90.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate90.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rotate90.py", "file_name": "test_rotate90.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 60, "span_ids": ["TestRotate90.test_k", "impl", "TestRotate90.test_rotate90_default", "TestRotate90.test_prob_k_spatial_axes", "TestRotate90.test_spatial_axes", "TestRotate90", "docstring"], "tokens": 364}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import Rotate90\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestRotate90(NumpyImageTestCase2D):\n def test_rotate90_default(self):\n rotate = Rotate90()\n rotated = rotate(self.imt[0])\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 1, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated, expected))\n\n def test_k(self):\n rotate = Rotate90(k=2)\n rotated = rotate(self.imt[0])\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 2, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated, expected))\n\n def test_spatial_axes(self):\n rotate = Rotate90(spatial_axes=(0, -1))\n rotated = rotate(self.imt[0])\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 1, (0, -1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated, expected))\n\n def test_prob_k_spatial_axes(self):\n rotate = Rotate90(k=2, spatial_axes=(0, 1))\n rotated = rotate(self.imt[0])\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 2, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated, expected))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", 
"metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate90d.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotate90d.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rotate90d.py", "file_name": "test_rotate90d.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 70, "span_ids": ["impl", "TestRotate90d.test_prob_k_spatial_axes", "TestRotate90d.test_rotate90_default", "TestRotate90d", "docstring", "TestRotate90d.test_spatial_axes", "TestRotate90d.test_k", "TestRotate90d.test_no_key"], "tokens": 463}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import Rotate90d\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestRotate90d(NumpyImageTestCase2D):\n def test_rotate90_default(self):\n key = \"test\"\n rotate = Rotate90d(keys=key)\n rotated = rotate({key: self.imt[0]})\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 1, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated[key], expected))\n\n def test_k(self):\n key = None\n rotate = Rotate90d(keys=key, k=2)\n rotated = rotate({key: self.imt[0]})\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 2, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated[key], expected))\n\n def test_spatial_axes(self):\n key = \"test\"\n rotate = Rotate90d(keys=key, spatial_axes=(0, 1))\n rotated = rotate({key: self.imt[0]})\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 1, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated[key], expected))\n\n def test_prob_k_spatial_axes(self):\n key = \"test\"\n rotate = Rotate90d(keys=key, k=2, spatial_axes=(0, 1))\n rotated = rotate({key: self.imt[0]})\n expected = []\n for channel in self.imt[0]:\n expected.append(np.rot90(channel, 2, (0, 1)))\n expected = np.stack(expected)\n self.assertTrue(np.allclose(rotated[key], expected))\n\n def test_no_key(self):\n key = \"unknown\"\n rotate = Rotate90d(keys=key)\n with self.assertRaisesRegex(KeyError, \"\"):\n rotate({\"test\": self.imt[0]})\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotated.py_unittest_TEST_CASES_3D._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotated.py_unittest_TEST_CASES_3D._", "embedding": null, "metadata": {"file_path": "tests/test_rotated.py", "file_name": "test_rotated.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 35, "span_ids": ["docstring"], "tokens": 248}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport scipy.ndimage\nfrom parameterized import parameterized\n\nfrom monai.transforms import Rotated\nfrom tests.utils import NumpyImageTestCase2D, NumpyImageTestCase3D\n\nTEST_CASES_2D = [\n (-np.pi / 6, False, \"bilinear\", \"border\", False),\n (-np.pi / 4, True, \"bilinear\", \"border\", False),\n (np.pi / 4.5, True, \"nearest\", \"reflection\", False),\n (-np.pi, False, \"nearest\", \"zeros\", False),\n (np.pi / 2, False, \"bilinear\", \"zeros\", True),\n]\n\nTEST_CASES_3D = [\n (-np.pi / 6, False, \"bilinear\", \"border\", False),\n (-np.pi / 4, True, \"bilinear\", \"border\", False),\n (np.pi / 4.5, True, \"nearest\", \"reflection\", False),\n (-np.pi, False, \"nearest\", \"zeros\", False),\n (np.pi / 2, False, \"bilinear\", \"zeros\", True),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotated.py_TestRotated2D_TestRotated2D.test_correct_results.self_assertLessEqual_np_c": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotated.py_TestRotated2D_TestRotated2D.test_correct_results.self_assertLessEqual_np_c", "embedding": null, "metadata": {"file_path": "tests/test_rotated.py", "file_name": "test_rotated.py", "file_type": "text/x-python", "category": "test", "start_line": 38, "end_line": 62, "span_ids": ["TestRotated2D.test_correct_results", "TestRotated2D"], "tokens": 353}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRotated2D(NumpyImageTestCase2D):\n @parameterized.expand(TEST_CASES_2D)\n def test_correct_results(self, angle, keep_size, mode, padding_mode, align_corners):\n rotate_fn = Rotated((\"img\", \"seg\"), angle, keep_size, (mode, \"nearest\"), padding_mode, align_corners)\n rotated = rotate_fn({\"img\": self.imt[0], \"seg\": self.segn[0]})\n if keep_size:\n np.testing.assert_allclose(self.imt[0].shape, rotated[\"img\"].shape)\n _order = 0 if mode == \"nearest\" else 1\n if padding_mode == \"border\":\n _mode = \"nearest\"\n elif padding_mode == \"reflection\":\n _mode = \"reflect\"\n else:\n _mode = \"constant\"\n expected = scipy.ndimage.rotate(\n self.imt[0, 0], -np.rad2deg(angle), (0, 1), not keep_size, order=_order, mode=_mode, prefilter=False\n )\n good = np.sum(np.isclose(expected, rotated[\"img\"][0], atol=1e-3))\n self.assertLessEqual(np.abs(good - expected.size), 5, \"diff at most 5 pixels\")\n\n expected = scipy.ndimage.rotate(\n self.segn[0, 0], -np.rad2deg(angle), (0, 1), not keep_size, order=0, mode=_mode, prefilter=False\n )\n expected = np.stack(expected).astype(int)\n self.assertLessEqual(np.count_nonzero(expected != rotated[\"seg\"][0]), 30)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotated.py_TestRotated3D_TestRotated3D.test_correct_results.self_assertLessEqual_np_c": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotated.py_TestRotated3D_TestRotated3D.test_correct_results.self_assertLessEqual_np_c", "embedding": null, "metadata": {"file_path": "tests/test_rotated.py", "file_name": "test_rotated.py", "file_type": "text/x-python", "category": "test", "start_line": 65, "end_line": 89, "span_ids": ["TestRotated3D", "TestRotated3D.test_correct_results"], "tokens": 362}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRotated3D(NumpyImageTestCase3D):\n @parameterized.expand(TEST_CASES_3D)\n def test_correct_results(self, angle, keep_size, mode, padding_mode, align_corners):\n rotate_fn = Rotated((\"img\", \"seg\"), [0, angle, 0], keep_size, (mode, \"nearest\"), padding_mode, align_corners)\n rotated = rotate_fn({\"img\": self.imt[0], \"seg\": self.segn[0]})\n if keep_size:\n np.testing.assert_allclose(self.imt[0].shape, rotated[\"img\"].shape)\n _order = 0 if mode == \"nearest\" else 1\n if padding_mode == \"border\":\n _mode = \"nearest\"\n elif padding_mode == \"reflection\":\n _mode = \"reflect\"\n else:\n _mode = \"constant\"\n expected = scipy.ndimage.rotate(\n self.imt[0, 0], np.rad2deg(angle), (0, 2), not keep_size, order=_order, mode=_mode, prefilter=False\n )\n good = np.sum(np.isclose(expected.astype(np.float32), rotated[\"img\"][0], atol=1e-3))\n self.assertLessEqual(np.abs(good - expected.size), 5, \"diff at most 5 voxels.\")\n\n expected = scipy.ndimage.rotate(\n self.segn[0, 0], np.rad2deg(angle), (0, 2), not keep_size, order=0, mode=_mode, prefilter=False\n )\n expected = np.stack(expected).astype(int)\n self.assertLessEqual(np.count_nonzero(expected != rotated[\"seg\"][0]), 110)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotated.py_TestRotated3DXY_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rotated.py_TestRotated3DXY_", "embedding": null, "metadata": {"file_path": "tests/test_rotated.py", "file_name": "test_rotated.py", "file_type": "text/x-python", "category": "test", "start_line": 92, "end_line": 121, "span_ids": ["TestRotated3DXY.test_correct_results", "impl:5", "TestRotated3DXY"], "tokens": 373}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRotated3DXY(NumpyImageTestCase3D):\n @parameterized.expand(TEST_CASES_3D)\n def test_correct_results(self, 
angle, keep_size, mode, padding_mode, align_corners):\n rotate_fn = Rotated((\"img\", \"seg\"), [0, 0, angle], keep_size, (mode, \"nearest\"), padding_mode, align_corners)\n rotated = rotate_fn({\"img\": self.imt[0], \"seg\": self.segn[0]})\n if keep_size:\n np.testing.assert_allclose(self.imt[0].shape, rotated[\"img\"].shape)\n _order = 0 if mode == \"nearest\" else 1\n if padding_mode == \"border\":\n _mode = \"nearest\"\n elif padding_mode == \"reflection\":\n _mode = \"reflect\"\n else:\n _mode = \"constant\"\n expected = scipy.ndimage.rotate(\n self.imt[0, 0], -np.rad2deg(angle), (0, 1), not keep_size, order=_order, mode=_mode, prefilter=False\n )\n good = np.sum(np.isclose(expected, rotated[\"img\"][0], atol=1e-3))\n self.assertLessEqual(np.abs(good - expected.size), 5, \"diff at most 5 voxels\")\n\n expected = scipy.ndimage.rotate(\n self.segn[0, 0], -np.rad2deg(angle), (0, 1), not keep_size, order=0, mode=_mode, prefilter=False\n )\n expected = np.stack(expected).astype(int)\n self.assertLessEqual(np.count_nonzero(expected != rotated[\"seg\"][0]), 110)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_scale_intensity.py", "file_name": "test_scale_intensity.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 39, "span_ids": ["impl", "TestScaleIntensity", "docstring", "TestScaleIntensity.test_range_scale", "TestScaleIntensity.test_factor_scale"], "tokens": 220}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import ScaleIntensity\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestScaleIntensity(NumpyImageTestCase2D):\n def test_range_scale(self):\n scaler = ScaleIntensity(minv=1.0, maxv=2.0)\n result = scaler(self.imt)\n mina = np.min(self.imt)\n maxa = np.max(self.imt)\n norm = (self.imt - mina) / (maxa - mina)\n expected = (norm * (2.0 - 1.0)) + 1.0\n np.testing.assert_allclose(result, expected)\n\n def test_factor_scale(self):\n scaler = ScaleIntensity(minv=None, maxv=None, factor=0.1)\n result = scaler(self.imt)\n expected = (self.imt * (1 + 0.1)).astype(np.float32)\n np.testing.assert_allclose(result, expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range.py_unittest_": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_scale_intensity_range.py", "file_name": "test_scale_intensity_range.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 31, "span_ids": ["IntensityScaleIntensityRange", "impl", "IntensityScaleIntensityRange.test_image_scale_intensity_range", "docstring"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import ScaleIntensityRange\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass IntensityScaleIntensityRange(NumpyImageTestCase2D):\n def test_image_scale_intensity_range(self):\n scaler = ScaleIntensityRange(a_min=20, a_max=108, b_min=50, b_max=80)\n scaled = scaler(self.imt)\n expected = (self.imt - 20) / 88\n expected = expected * 30 + 50\n self.assertTrue(np.allclose(scaled, expected))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentiles.py_unittest_TestScaleIntensityRangePercentiles.test_scaling.self_assertTrue_np_allclo": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentiles.py_unittest_TestScaleIntensityRangePercentiles.test_scaling.self_assertTrue_np_allclo", "embedding": null, "metadata": {"file_path": "tests/test_scale_intensity_range_percentiles.py", "file_name": "test_scale_intensity_range_percentiles.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 33, "span_ids": ["TestScaleIntensityRangePercentiles.test_scaling", "TestScaleIntensityRangePercentiles", "docstring"], "tokens": 179}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms.intensity.array import ScaleIntensityRangePercentiles\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestScaleIntensityRangePercentiles(NumpyImageTestCase2D):\n def test_scaling(self):\n img = self.imt\n lower = 10\n upper = 99\n b_min = 0\n b_max = 255\n\n a_min = np.percentile(img, lower)\n a_max = np.percentile(img, upper)\n expected = (img - a_min) / (a_max - a_min)\n expected = (expected * (b_max - b_min)) + b_min\n scaler = ScaleIntensityRangePercentiles(lower=lower, upper=upper, b_min=b_min, b_max=b_max)\n self.assertTrue(np.allclose(expected, scaler(img)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": 
"TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentiles.py_TestScaleIntensityRangePercentiles.test_relative_scaling_TestScaleIntensityRangePercentiles.test_relative_scaling.self_assertTrue_np_allclo": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentiles.py_TestScaleIntensityRangePercentiles.test_relative_scaling_TestScaleIntensityRangePercentiles.test_relative_scaling.self_assertTrue_np_allclo", "embedding": null, "metadata": {"file_path": "tests/test_scale_intensity_range_percentiles.py", "file_name": "test_scale_intensity_range_percentiles.py", "file_type": "text/x-python", "category": "test", "start_line": 35, "end_line": 50, "span_ids": ["TestScaleIntensityRangePercentiles.test_relative_scaling"], "tokens": 212}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestScaleIntensityRangePercentiles(NumpyImageTestCase2D):\n\n def test_relative_scaling(self):\n img = self.imt\n lower = 10\n upper = 99\n b_min = 100\n b_max = 300\n scaler = ScaleIntensityRangePercentiles(lower=lower, upper=upper, b_min=b_min, b_max=b_max, relative=True)\n\n expected_a_min = np.percentile(img, lower)\n expected_a_max = np.percentile(img, upper)\n expected_b_min = ((b_max - b_min) * (lower / 100.0)) + b_min\n expected_b_max = ((b_max - b_min) * (upper / 100.0)) + b_min\n expected_img = (img - expected_a_min) / (expected_a_max - expected_a_min)\n expected_img = (expected_img * (expected_b_max - expected_b_min)) + expected_b_min\n\n self.assertTrue(np.allclose(expected_img, scaler(img)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentiles.py_TestScaleIntensityRangePercentiles.test_invalid_instantiation_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentiles.py_TestScaleIntensityRangePercentiles.test_invalid_instantiation_", "embedding": null, "metadata": {"file_path": "tests/test_scale_intensity_range_percentiles.py", "file_name": "test_scale_intensity_range_percentiles.py", "file_type": "text/x-python", "category": "test", "start_line": 52, "end_line": 61, "span_ids": ["TestScaleIntensityRangePercentiles.test_invalid_instantiation", "impl"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestScaleIntensityRangePercentiles(NumpyImageTestCase2D):\n\n def test_invalid_instantiation(self):\n self.assertRaises(AssertionError, ScaleIntensityRangePercentiles, lower=-10, upper=99, b_min=0, b_max=255)\n self.assertRaises(AssertionError, 
ScaleIntensityRangePercentiles, lower=101, upper=99, b_min=0, b_max=255)\n self.assertRaises(AssertionError, ScaleIntensityRangePercentiles, lower=30, upper=-20, b_min=0, b_max=255)\n self.assertRaises(AssertionError, ScaleIntensityRangePercentiles, lower=30, upper=900, b_min=0, b_max=255)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentilesd.py_unittest_TestScaleIntensityRangePercentilesd.test_scaling.self_assertTrue_np_allclo": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentilesd.py_unittest_TestScaleIntensityRangePercentilesd.test_scaling.self_assertTrue_np_allclo", "embedding": null, "metadata": {"file_path": "tests/test_scale_intensity_range_percentilesd.py", "file_name": "test_scale_intensity_range_percentilesd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 37, "span_ids": ["TestScaleIntensityRangePercentilesd", "TestScaleIntensityRangePercentilesd.test_scaling", "docstring"], "tokens": 200}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms.intensity.dictionary import ScaleIntensityRangePercentilesd\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestScaleIntensityRangePercentilesd(NumpyImageTestCase2D):\n def test_scaling(self):\n img = self.imt\n data = {}\n data[\"img\"] = img\n lower = 10\n upper = 99\n b_min = 0\n b_max = 255\n\n a_min = np.percentile(img, lower)\n a_max = np.percentile(img, upper)\n expected = (img - a_min) / (a_max - a_min)\n expected = (expected * (b_max - b_min)) + b_min\n\n scaler = ScaleIntensityRangePercentilesd(keys=data.keys(), lower=lower, upper=upper, b_min=b_min, b_max=b_max)\n\n self.assertTrue(np.allclose(expected, scaler(data)[\"img\"]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentilesd.py_TestScaleIntensityRangePercentilesd.test_relative_scaling_TestScaleIntensityRangePercentilesd.test_relative_scaling.self_assertTrue_np_allclo": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentilesd.py_TestScaleIntensityRangePercentilesd.test_relative_scaling_TestScaleIntensityRangePercentilesd.test_relative_scaling.self_assertTrue_np_allclo", "embedding": null, "metadata": {"file_path": "tests/test_scale_intensity_range_percentilesd.py", "file_name": "test_scale_intensity_range_percentilesd.py", "file_type": "text/x-python", "category": "test", "start_line": 39, "end_line": 58, "span_ids": 
["TestScaleIntensityRangePercentilesd.test_relative_scaling"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestScaleIntensityRangePercentilesd(NumpyImageTestCase2D):\n\n def test_relative_scaling(self):\n img = self.imt\n data = {}\n data[\"img\"] = img\n lower = 10\n upper = 99\n b_min = 100\n b_max = 300\n scaler = ScaleIntensityRangePercentilesd(\n keys=data.keys(), lower=lower, upper=upper, b_min=b_min, b_max=b_max, relative=True\n )\n\n expected_a_min = np.percentile(img, lower)\n expected_a_max = np.percentile(img, upper)\n expected_b_min = ((b_max - b_min) * (lower / 100.0)) + b_min\n expected_b_max = ((b_max - b_min) * (upper / 100.0)) + b_min\n expected_img = (img - expected_a_min) / (expected_a_max - expected_a_min)\n expected_img = (expected_img * (expected_b_max - expected_b_min)) + expected_b_min\n\n self.assertTrue(np.allclose(expected_img, scaler(data)[\"img\"]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentilesd.py_TestScaleIntensityRangePercentilesd.test_invalid_instantiation_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_range_percentilesd.py_TestScaleIntensityRangePercentilesd.test_invalid_instantiation_", "embedding": null, "metadata": {"file_path": "tests/test_scale_intensity_range_percentilesd.py", "file_name": "test_scale_intensity_range_percentilesd.py", "file_type": "text/x-python", "category": "test", "start_line": 60, "end_line": 77, "span_ids": ["TestScaleIntensityRangePercentilesd.test_invalid_instantiation", "impl"], "tokens": 189}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestScaleIntensityRangePercentilesd(NumpyImageTestCase2D):\n\n def test_invalid_instantiation(self):\n self.assertRaises(\n AssertionError, ScaleIntensityRangePercentilesd, keys=[\"img\"], lower=-1, upper=99, b_min=0, b_max=255\n )\n self.assertRaises(\n AssertionError, ScaleIntensityRangePercentilesd, keys=[\"img\"], lower=101, upper=99, b_min=0, b_max=255\n )\n self.assertRaises(\n AssertionError, ScaleIntensityRangePercentilesd, keys=[\"img\"], lower=30, upper=-2, b_min=0, b_max=255\n )\n self.assertRaises(\n AssertionError, ScaleIntensityRangePercentilesd, keys=[\"img\"], lower=30, upper=1000, b_min=0, b_max=255\n )\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_ranged.py_unittest_": 
{"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensity_ranged.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_scale_intensity_ranged.py", "file_name": "test_scale_intensity_ranged.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 32, "span_ids": ["IntensityScaleIntensityRanged", "IntensityScaleIntensityRanged.test_image_scale_intensity_ranged", "impl", "docstring"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import ScaleIntensityRanged\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass IntensityScaleIntensityRanged(NumpyImageTestCase2D):\n def test_image_scale_intensity_ranged(self):\n key = \"img\"\n scaler = ScaleIntensityRanged(keys=key, a_min=20, a_max=108, b_min=50, b_max=80)\n scaled = scaler({key: self.imt})\n expected = (self.imt - 20) / 88\n expected = expected * 30 + 50\n self.assertTrue(np.allclose(scaled[key], expected))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensityd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_scale_intensityd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_scale_intensityd.py", "file_name": "test_scale_intensityd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 41, "span_ids": ["impl", "TestScaleIntensityd", "TestScaleIntensityd.test_factor_scale", "docstring", "TestScaleIntensityd.test_range_scale"], "tokens": 252}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import ScaleIntensityd\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestScaleIntensityd(NumpyImageTestCase2D):\n def test_range_scale(self):\n key = \"img\"\n scaler = ScaleIntensityd(keys=[key], minv=1.0, maxv=2.0)\n result = scaler({key: self.imt})\n mina = np.min(self.imt)\n maxa = np.max(self.imt)\n norm = (self.imt - mina) / (maxa - mina)\n expected = (norm * (2.0 - 1.0)) + 1.0\n np.testing.assert_allclose(result[key], expected)\n\n def test_factor_scale(self):\n key = \"img\"\n scaler = ScaleIntensityd(keys=[key], minv=None, maxv=None, factor=0.1)\n result = scaler({key: self.imt})\n expected = (self.imt * (1 + 0.1)).astype(np.float32)\n np.testing.assert_allclose(result[key], expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": 
"{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_se_block.py_unittest_for_type_1_in_.for_type_2_in_.TEST_CASES_3D_append_test": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_se_block.py_unittest_for_type_1_in_.for_type_2_in_.TEST_CASES_3D_append_test", "embedding": null, "metadata": {"file_path": "tests/test_se_block.py", "file_name": "test_se_block.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 60, "span_ids": ["docstring"], "tokens": 519}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks import eval_mode\nfrom monai.networks.blocks import SEBlock\nfrom monai.networks.layers.factories import Act, Norm\nfrom tests.utils import test_script_save\n\ndevice = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n\nTEST_CASES = [\n [\n {\"spatial_dims\": 2, \"in_channels\": 4, \"n_chns_1\": 20, \"n_chns_2\": 30, \"n_chns_3\": 4, \"r\": 2},\n (7, 4, 64, 48), # 4-channel 2D, batch 7\n (7, 4, 64, 48),\n ],\n [\n {\"spatial_dims\": 1, \"in_channels\": 3, \"n_chns_1\": 20, \"n_chns_2\": 30, \"n_chns_3\": 40, \"r\": 5},\n (16, 3, 63), # 3-channel 1D, batch 16\n (16, 40, 63),\n ],\n]\n\nTEST_CASES_3D = []\nfor type_1 in (\n {\"kernel_size\": 3, \"act\": Act.PRELU, \"norm\": Norm.INSTANCE},\n {\"kernel_size\": 1, \"act\": None, \"norm\": Norm.INSTANCE},\n):\n for type_2 in (\n {\"kernel_size\": 3, \"act\": Act.PRELU, \"norm\": Norm.INSTANCE},\n {\"kernel_size\": 1, \"act\": None, \"norm\": Norm.INSTANCE},\n ):\n test_case = [\n {\n \"spatial_dims\": 3,\n \"in_channels\": 10,\n \"r\": 3,\n \"n_chns_1\": 3,\n \"n_chns_2\": 5,\n \"n_chns_3\": 11,\n \"conv_param_1\": type_1,\n \"conv_param_3\": type_2,\n },\n (16, 10, 32, 24, 48), # 10-channel 3D, batch 16\n (16, 11, 32, 24, 48),\n ]\n TEST_CASES_3D.append(test_case)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_se_block.py_TestSEBlockLayer_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_se_block.py_TestSEBlockLayer_", "embedding": null, "metadata": {"file_path": "tests/test_se_block.py", "file_name": "test_se_block.py", "file_type": "text/x-python", "category": "test", "start_line": 63, "end_line": 84, "span_ids": ["TestSEBlockLayer.test_script", "TestSEBlockLayer", "impl:12", "TestSEBlockLayer.test_shape", "TestSEBlockLayer.test_ill_arg"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TestSEBlockLayer(unittest.TestCase):\n @parameterized.expand(TEST_CASES + TEST_CASES_3D)\n def test_shape(self, input_param, input_shape, expected_shape):\n net = SEBlock(**input_param).to(device)\n with eval_mode(net):\n result = net(torch.randn(input_shape).to(device))\n self.assertEqual(result.shape, expected_shape)\n\n def test_script(self):\n input_param, input_shape, _ = TEST_CASES[0]\n net = SEBlock(**input_param)\n test_data = torch.randn(input_shape)\n test_script_save(net, test_data)\n\n def test_ill_arg(self):\n with self.assertRaises(ValueError):\n SEBlock(spatial_dims=1, in_channels=4, n_chns_1=2, n_chns_2=3, n_chns_3=4, r=100)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_seg_loss_integration.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_seg_loss_integration.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_seg_loss_integration.py", "file_name": "test_seg_loss_integration.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 44, "span_ids": ["docstring"], "tokens": 707}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.optim as optim\nfrom parameterized import parameterized\n\nfrom monai.losses import DiceLoss, FocalLoss, GeneralizedDiceLoss, TverskyLoss\nfrom monai.networks import one_hot\n\nTEST_CASES = [\n [DiceLoss, {\"to_onehot_y\": True, \"squared_pred\": True, \"smooth_nr\": 1e-4, \"smooth_dr\": 1e-4}, {}],\n [DiceLoss, {\"to_onehot_y\": True, \"squared_pred\": True, \"smooth_nr\": 0, \"smooth_dr\": 1e-3}, {}],\n [DiceLoss, {\"to_onehot_y\": False, \"squared_pred\": True, \"smooth_nr\": 0, \"smooth_dr\": 1e-3}, {}],\n [DiceLoss, {\"to_onehot_y\": True, \"squared_pred\": True, \"batch\": True}, {}],\n [DiceLoss, {\"to_onehot_y\": True, \"sigmoid\": True}, {}],\n [DiceLoss, {\"to_onehot_y\": True, \"softmax\": True}, {}],\n [FocalLoss, {\"to_onehot_y\": True, \"gamma\": 1.5, \"weight\": torch.tensor([1, 2])}, {}],\n [FocalLoss, {\"to_onehot_y\": False, \"gamma\": 1.5, \"weight\": [1, 2]}, {}],\n [FocalLoss, {\"to_onehot_y\": False, \"gamma\": 1.5, \"weight\": 1.0}, {}],\n [FocalLoss, {\"to_onehot_y\": True, \"gamma\": 1.5}, {}],\n [GeneralizedDiceLoss, {\"to_onehot_y\": True, \"softmax\": True}, {}],\n [GeneralizedDiceLoss, {\"to_onehot_y\": True, \"sigmoid\": True}, {}],\n [GeneralizedDiceLoss, {\"to_onehot_y\": True, \"sigmoid\": True, \"w_type\": \"simple\"}, {}],\n [GeneralizedDiceLoss, {\"to_onehot_y\": True, \"sigmoid\": True, \"w_type\": \"uniform\"}, {}],\n [GeneralizedDiceLoss, {\"to_onehot_y\": True, \"sigmoid\": True, \"w_type\": \"uniform\", \"batch\": True}, {}],\n [GeneralizedDiceLoss, {\"to_onehot_y\": False, \"sigmoid\": True, \"w_type\": \"uniform\", \"batch\": True}, {}],\n [TverskyLoss, {\"to_onehot_y\": True, 
\"softmax\": True, \"alpha\": 0.8, \"beta\": 0.2}, {}],\n [TverskyLoss, {\"to_onehot_y\": True, \"softmax\": True, \"alpha\": 0.8, \"beta\": 0.2, \"batch\": True}, {}],\n [TverskyLoss, {\"to_onehot_y\": True, \"softmax\": True, \"alpha\": 1.0, \"beta\": 0.0}, {}],\n [TverskyLoss, {\"to_onehot_y\": False, \"softmax\": True, \"alpha\": 1.0, \"beta\": 0.0}, {}],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_seg_loss_integration.py_TestSegLossIntegration_TestSegLossIntegration.test_convergence._define_a_one_layer_mode": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_seg_loss_integration.py_TestSegLossIntegration_TestSegLossIntegration.test_convergence._define_a_one_layer_mode", "embedding": null, "metadata": {"file_path": "tests/test_seg_loss_integration.py", "file_name": "test_seg_loss_integration.py", "file_type": "text/x-python", "category": "test", "start_line": 47, "end_line": 91, "span_ids": ["TestSegLossIntegration.setUp", "TestSegLossIntegration.test_convergence", "TestSegLossIntegration", "TestSegLossIntegration.tearDown"], "tokens": 473}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSegLossIntegration(unittest.TestCase):\n def setUp(self):\n torch.backends.cudnn.deterministic = True\n torch.backends.cudnn.benchmark = False\n torch.manual_seed(0)\n self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu:0\")\n\n def tearDown(self):\n torch.backends.cudnn.deterministic = False\n torch.backends.cudnn.benchmark = True\n\n @parameterized.expand(TEST_CASES)\n def test_convergence(self, loss_type, loss_args, forward_args):\n \"\"\"\n The goal of this test is to assess if the gradient of the loss function\n is correct by testing if we can train a one layer neural network\n to segment one image.\n We verify that the loss is decreasing in almost all SGD steps.\n \"\"\"\n learning_rate = 0.001\n max_iter = 40\n\n # define a simple 3d example\n target_seg = torch.tensor(\n [\n [\n # raw 0\n [[0, 0, 0, 0], [0, 1, 1, 0], [0, 1, 1, 0], [0, 0, 0, 0]],\n # raw 1\n [[0, 0, 0, 0], [0, 1, 1, 0], [0, 1, 1, 0], [0, 0, 0, 0]],\n # raw 2\n [[0, 0, 0, 0], [0, 1, 1, 0], [0, 1, 1, 0], [0, 0, 0, 0]],\n ]\n ],\n device=self.device,\n )\n target_seg = torch.unsqueeze(target_seg, dim=0)\n image = 12 * target_seg + 27\n image = image.float().to(self.device)\n num_classes = 2\n num_voxels = 3 * 4 * 4\n\n target_onehot = one_hot(target_seg, num_classes=num_classes)\n\n # define a one layer model\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_seg_loss_integration.py_TestSegLossIntegration.test_convergence.OnelayerNet_TestSegLossIntegration.test_convergence.OnelayerNet.forward.return.x": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_seg_loss_integration.py_TestSegLossIntegration.test_convergence.OnelayerNet_TestSegLossIntegration.test_convergence.OnelayerNet.forward.return.x", "embedding": null, "metadata": {"file_path": "tests/test_seg_loss_integration.py", "file_name": "test_seg_loss_integration.py", "file_type": "text/x-python", "category": "test", "start_line": 80, "end_line": 93, "span_ids": ["TestSegLossIntegration.test_convergence"], "tokens": 185}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSegLossIntegration(unittest.TestCase):\n\n @parameterized.expand(TEST_CASES)\n def test_convergence(self, loss_type, loss_args, forward_args):\n # ... other code\n class OnelayerNet(nn.Module):\n def __init__(self):\n super(OnelayerNet, self).__init__()\n self.layer_1 = nn.Linear(num_voxels, 200)\n self.acti = nn.ReLU()\n self.layer_2 = nn.Linear(200, num_voxels * num_classes)\n\n def forward(self, x):\n x = x.view(-1, num_voxels)\n x = self.layer_1(x)\n x = self.acti(x)\n x = self.layer_2(x)\n x = x.view(-1, num_classes, 3, 4, 4)\n return x\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_seg_loss_integration.py_TestSegLossIntegration.test_convergence._initialise_the_network_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_seg_loss_integration.py_TestSegLossIntegration.test_convergence._initialise_the_network_", "embedding": null, "metadata": {"file_path": "tests/test_seg_loss_integration.py", "file_name": "test_seg_loss_integration.py", "file_type": "text/x-python", "category": "test", "start_line": 107, "end_line": 156, "span_ids": ["impl:3", "TestSegLossIntegration.test_convergence"], "tokens": 384}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSegLossIntegration(unittest.TestCase):\n\n @parameterized.expand(TEST_CASES)\n def test_convergence(self, loss_type, loss_args, forward_args):\n\n # initialise the network\n net = OnelayerNet().to(self.device)\n\n # initialize the loss\n loss = loss_type(**loss_args)\n\n # initialize a SGD optimizer\n optimizer = optim.Adam(net.parameters(), lr=learning_rate)\n\n loss_history = []\n init_output = None\n\n # train the network\n for iter_i in range(max_iter):\n # set the gradient to zero\n optimizer.zero_grad()\n\n # forward pass\n output = net(image)\n if init_output is None:\n init_output = torch.argmax(output, 1).detach().cpu().numpy()\n\n if loss_args[\"to_onehot_y\"] is False:\n loss_val = loss(output, target_onehot, **forward_args)\n else:\n loss_val = loss(output, target_seg, **forward_args)\n\n if iter_i % 10 == 0:\n pred = torch.argmax(output, 1).detach().cpu().numpy()\n gt = target_seg.detach().cpu().numpy()[:, 0]\n print(f\"{loss_type.__name__} iter: {iter_i}, acc: {np.sum(pred == gt) / np.prod(pred.shape)}\")\n\n # backward pass\n loss_val.backward()\n optimizer.step()\n\n # stats\n loss_history.append(loss_val.item())\n\n pred = torch.argmax(output, 1).detach().cpu().numpy()\n target = target_seg.detach().cpu().numpy()[:, 0]\n # initial predictions are bad\n self.assertTrue(not np.allclose(init_output, target))\n # final predictions are good\n np.testing.assert_allclose(pred, target)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_set_determinism.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_set_determinism.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_set_determinism.py", "file_name": "test_set_determinism.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 54, "span_ids": ["impl", "TestSetDeterminism.test_values", "TestSetDeterminism", "docstring"], "tokens": 297}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\n\nfrom monai.utils import get_seed, set_determinism\n\n\nclass TestSetDeterminism(unittest.TestCase):\n def test_values(self):\n # check system default flags\n set_determinism(None)\n self.assertTrue(not torch.backends.cudnn.deterministic)\n self.assertTrue(get_seed() is None)\n # set default seed\n set_determinism()\n self.assertTrue(get_seed() is not None)\n self.assertTrue(torch.backends.cudnn.deterministic)\n self.assertTrue(not torch.backends.cudnn.benchmark)\n # resume default\n set_determinism(None)\n self.assertTrue(not torch.backends.cudnn.deterministic)\n self.assertTrue(not torch.backends.cudnn.benchmark)\n self.assertTrue(get_seed() is None)\n # test seeds\n seed = 255\n set_determinism(seed=seed)\n self.assertEqual(seed, get_seed())\n a = np.random.randint(seed)\n b = torch.randint(seed, (1,))\n set_determinism(seed=seed)\n c = np.random.randint(seed)\n d = torch.randint(seed, (1,))\n self.assertEqual(a, c)\n self.assertEqual(b, d)\n self.assertTrue(torch.backends.cudnn.deterministic)\n self.assertTrue(not torch.backends.cudnn.benchmark)\n set_determinism(seed=None)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_shift_intensity.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_shift_intensity.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_shift_intensity.py", "file_name": "test_shift_intensity.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 30, "span_ids": ["TestShiftIntensity.test_value", "TestShiftIntensity", "impl", "docstring"], "tokens": 99}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import ShiftIntensity\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestShiftIntensity(NumpyImageTestCase2D):\n def test_value(self):\n shifter = ShiftIntensity(offset=1.0)\n result = shifter(self.imt)\n expected = self.imt + 1.0\n np.testing.assert_allclose(result, expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_shift_intensityd.py_unittest_": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_shift_intensityd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_shift_intensityd.py", "file_name": "test_shift_intensityd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 31, "span_ids": ["TestShiftIntensityd", "TestShiftIntensityd.test_value", "impl", "docstring"], "tokens": 116}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import ShiftIntensityd\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestShiftIntensityd(NumpyImageTestCase2D):\n def test_value(self):\n key = \"img\"\n shifter = ShiftIntensityd(keys=[key], offset=1.0)\n result = shifter({key: self.imt})\n expected = self.imt + 1.0\n np.testing.assert_allclose(result[key], expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_simple_aspp.py_unittest_TEST_ILL_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_simple_aspp.py_unittest_TEST_ILL_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_simple_aspp.py", "file_name": "test_simple_aspp.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 66, "span_ids": ["docstring"], "tokens": 559}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks import eval_mode\nfrom monai.networks.blocks import SimpleASPP\n\nTEST_CASES = [\n [ # 32-channel 2D, batch 7\n {\"spatial_dims\": 2, \"in_channels\": 32, \"conv_out_channels\": 3},\n (7, 32, 18, 20),\n (7, 12, 18, 20),\n ],\n [ # 4-channel 1D, batch 16\n {\"spatial_dims\": 1, \"in_channels\": 4, \"conv_out_channels\": 8},\n (16, 4, 17),\n (16, 32, 17),\n ],\n [ # 3-channel 3D, batch 16\n {\"spatial_dims\": 3, \"in_channels\": 3, \"conv_out_channels\": 2},\n (16, 3, 17, 18, 19),\n (16, 8, 17, 18, 19),\n ],\n [ # 3-channel 3D, batch 16\n {\n \"spatial_dims\": 3,\n \"in_channels\": 3,\n \"conv_out_channels\": 2,\n \"kernel_sizes\": (1, 3, 3),\n \"dilations\": (1, 2, 4),\n },\n (16, 3, 17, 18, 19),\n (16, 6, 17, 18, 19),\n ],\n]\n\nTEST_ILL_CASES = [\n [ # 3-channel 3D, batch 16, wrong k and d sizes.\n {\"spatial_dims\": 3, \"in_channels\": 3, \"conv_out_channels\": 2, \"kernel_sizes\": (1, 3, 3), \"dilations\": (1, 2)},\n (16, 3, 17, 18, 19),\n ValueError,\n ],\n [ # 3-channel 3D, batch 16, wrong k and d sizes.\n {\n \"spatial_dims\": 3,\n \"in_channels\": 3,\n \"conv_out_channels\": 2,\n \"kernel_sizes\": (1, 3, 4),\n 
\"dilations\": (1, 2, 3),\n },\n (16, 3, 17, 18, 19),\n NotImplementedError, # unknown padding k=4, d=3\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_simple_aspp.py_TestChannelSELayer_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_simple_aspp.py_TestChannelSELayer_", "embedding": null, "metadata": {"file_path": "tests/test_simple_aspp.py", "file_name": "test_simple_aspp.py", "file_type": "text/x-python", "category": "test", "start_line": 69, "end_line": 85, "span_ids": ["TestChannelSELayer.test_shape", "impl:5", "TestChannelSELayer", "TestChannelSELayer.test_ill_args"], "tokens": 121}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestChannelSELayer(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_shape(self, input_param, input_shape, expected_shape):\n net = SimpleASPP(**input_param)\n with eval_mode(net):\n result = net(torch.randn(input_shape))\n self.assertEqual(result.shape, expected_shape)\n\n @parameterized.expand(TEST_ILL_CASES)\n def test_ill_args(self, input_param, input_shape, error_type):\n with self.assertRaises(error_type):\n SimpleASPP(**input_param)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacing.py_unittest_TEST_CASES": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacing.py_unittest_TEST_CASES", "embedding": null, "metadata": {"file_path": "tests/test_spacing.py", "file_name": "test_spacing.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 141, "span_ids": ["docstring"], "tokens": 39}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import Spacing\nfrom monai.utils import ensure_tuple\n\nTEST_CASES =\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacing.py_TestSpacingCase_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacing.py_TestSpacingCase_", "embedding": null, "metadata": {"file_path": "tests/test_spacing.py", "file_name": "test_spacing.py", "file_type": "text/x-python", "category": "test", "start_line": 144, "end_line": 163, "span_ids": ["TestSpacingCase.test_spacing", "impl:3", "TestSpacingCase", "TestSpacingCase.test_ill_pixdim"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSpacingCase(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_spacing(self, init_param, img, data_param, expected_output):\n res = Spacing(**init_param)(img, **data_param)\n np.testing.assert_allclose(res[0], expected_output, atol=1e-6)\n sr = len(res[0].shape) - 1\n if isinstance(init_param[\"pixdim\"], float):\n init_param[\"pixdim\"] = [init_param[\"pixdim\"]] * sr\n init_pixdim = ensure_tuple(init_param[\"pixdim\"])\n init_pixdim = init_param[\"pixdim\"][:sr]\n np.testing.assert_allclose(init_pixdim[:sr], np.sqrt(np.sum(np.square(res[2]), axis=0))[:sr])\n\n def test_ill_pixdim(self):\n with self.assertRaises(ValueError):\n Spacing(pixdim=(-1, 2.0))(np.zeros((1, 1)))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacingd.py_unittest_TestSpacingDCase.test_spacingd_3d.None_2": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacingd.py_unittest_TestSpacingDCase.test_spacingd_3d.None_2", "embedding": null, "metadata": {"file_path": "tests/test_spacingd.py", "file_name": "test_spacingd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 26, "span_ids": ["TestSpacingDCase.test_spacingd_3d", "TestSpacingDCase", "docstring"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\n\nfrom monai.transforms import Spacingd\n\n\nclass TestSpacingDCase(unittest.TestCase):\n def test_spacingd_3d(self):\n data = {\"image\": np.ones((2, 10, 15, 20)), \"image_meta_dict\": {\"affine\": np.eye(4)}}\n spacing = Spacingd(keys=\"image\", pixdim=(1, 2, 1.4))\n res = spacing(data)\n self.assertEqual((\"image\", \"image_meta_dict\", \"image_transforms\"), tuple(sorted(res)))\n 
np.testing.assert_allclose(res[\"image\"].shape, (2, 10, 8, 15))\n np.testing.assert_allclose(res[\"image_meta_dict\"][\"affine\"], np.diag([1, 2, 1.4, 1.0]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacingd.py_TestSpacingDCase.test_spacingd_2d_TestSpacingDCase.test_spacingd_2d.None_2": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacingd.py_TestSpacingDCase.test_spacingd_2d_TestSpacingDCase.test_spacingd_2d.None_2", "embedding": null, "metadata": {"file_path": "tests/test_spacingd.py", "file_name": "test_spacingd.py", "file_type": "text/x-python", "category": "test", "start_line": 28, "end_line": 34, "span_ids": ["TestSpacingDCase.test_spacingd_2d"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSpacingDCase(unittest.TestCase):\n\n def test_spacingd_2d(self):\n data = {\"image\": np.ones((2, 10, 20)), \"image_meta_dict\": {\"affine\": np.eye(3)}}\n spacing = Spacingd(keys=\"image\", pixdim=(1, 2, 1.4))\n res = spacing(data)\n self.assertEqual((\"image\", \"image_meta_dict\", \"image_transforms\"), tuple(sorted(res)))\n np.testing.assert_allclose(res[\"image\"].shape, (2, 10, 10))\n np.testing.assert_allclose(res[\"image_meta_dict\"][\"affine\"], np.diag((1, 2, 1)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacingd.py_TestSpacingDCase.test_interp_all_TestSpacingDCase.test_interp_all.None_2": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacingd.py_TestSpacingDCase.test_interp_all_TestSpacingDCase.test_interp_all.None_2", "embedding": null, "metadata": {"file_path": "tests/test_spacingd.py", "file_name": "test_spacingd.py", "file_type": "text/x-python", "category": "test", "start_line": 36, "end_line": 57, "span_ids": ["TestSpacingDCase.test_interp_all"], "tokens": 219}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSpacingDCase(unittest.TestCase):\n\n def test_interp_all(self):\n data = {\n \"image\": np.arange(20).reshape((2, 1, 10)),\n \"seg\": np.ones((2, 1, 10)),\n \"image_meta_dict\": {\"affine\": np.eye(4)},\n \"seg_meta_dict\": {\"affine\": np.eye(4)},\n }\n spacing = Spacingd(\n keys=(\"image\", \"seg\"),\n mode=\"nearest\",\n pixdim=(\n 1,\n 0.2,\n ),\n )\n res = spacing(data)\n self.assertEqual(\n (\"image\", \"image_meta_dict\", \"image_transforms\", \"seg\", \"seg_meta_dict\", \"seg_transforms\"),\n 
tuple(sorted(res)),\n )\n np.testing.assert_allclose(res[\"image\"].shape, (2, 1, 46))\n np.testing.assert_allclose(res[\"image_meta_dict\"][\"affine\"], np.diag((1, 0.2, 1, 1)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacingd.py_TestSpacingDCase.test_interp_sep_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spacingd.py_TestSpacingDCase.test_interp_sep_", "embedding": null, "metadata": {"file_path": "tests/test_spacingd.py", "file_name": "test_spacingd.py", "file_type": "text/x-python", "category": "test", "start_line": 59, "end_line": 85, "span_ids": ["TestSpacingDCase.test_interp_sep", "impl"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSpacingDCase(unittest.TestCase):\n\n def test_interp_sep(self):\n data = {\n \"image\": np.ones((2, 1, 10)),\n \"seg\": np.ones((2, 1, 10)),\n \"image_meta_dict\": {\"affine\": np.eye(4)},\n \"seg_meta_dict\": {\"affine\": np.eye(4)},\n }\n spacing = Spacingd(\n keys=(\"image\", \"seg\"),\n mode=(\"bilinear\", \"nearest\"),\n pixdim=(\n 1,\n 0.2,\n ),\n )\n res = spacing(data)\n self.assertEqual(\n (\"image\", \"image_meta_dict\", \"image_transforms\", \"seg\", \"seg_meta_dict\", \"seg_transforms\"),\n tuple(sorted(res)),\n )\n np.testing.assert_allclose(res[\"image\"].shape, (2, 1, 46))\n np.testing.assert_allclose(res[\"image_meta_dict\"][\"affine\"], np.diag((1, 0.2, 1, 1)))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spatial_cropd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spatial_cropd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_spatial_cropd.py", "file_name": "test_spatial_cropd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 57, "span_ids": ["TestSpatialCropd.test_shape", "impl:3", "TestSpatialCropd", "docstring"], "tokens": 500}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import SpatialCropd\n\nTEST_CASES = [\n [\n {\"keys\": [\"img\"], \"roi_center\": [1, 1, 1], \"roi_size\": [2, 2, 2]},\n {\"img\": np.random.randint(0, 2, size=[3, 3, 3, 3])},\n (3, 2, 2, 2),\n ],\n [\n {\"keys\": [\"img\"], \"roi_start\": [0, 0, 0], \"roi_end\": [2, 2, 
2]},\n {\"img\": np.random.randint(0, 2, size=[3, 3, 3, 3])},\n (3, 2, 2, 2),\n ],\n [\n {\"keys\": [\"img\"], \"roi_start\": [0, 0], \"roi_end\": [2, 2]},\n {\"img\": np.random.randint(0, 2, size=[3, 3, 3, 3])},\n (3, 2, 2, 3),\n ],\n [\n {\"keys\": [\"img\"], \"roi_start\": [0, 0, 0, 0, 0], \"roi_end\": [2, 2, 2, 2, 2]},\n {\"img\": np.random.randint(0, 2, size=[3, 3, 3, 3])},\n (3, 2, 2, 2),\n ],\n [\n {\"keys\": [\"img\"], \"roi_slices\": [slice(s, e) for s, e in zip([-1, -2, 0], [None, None, 2])]},\n {\"img\": np.random.randint(0, 2, size=[3, 3, 3, 3])},\n (3, 1, 2, 2),\n ],\n]\n\n\nclass TestSpatialCropd(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_shape(self, input_param, input_data, expected_shape):\n result = SpatialCropd(**input_param)(input_data)\n self.assertTupleEqual(result[\"img\"].shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spatial_pad.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spatial_pad.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_spatial_pad.py", "file_name": "test_spatial_pad.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 50, "span_ids": ["TestSpatialPad.test_pad_shape", "TestSpatialPad", "impl:7", "docstring"], "tokens": 331}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import SpatialPad\n\nTEST_CASE_1 = [\n {\"spatial_size\": [15, 8, 8], \"method\": \"symmetric\", \"mode\": \"constant\"},\n np.zeros((3, 8, 8, 4)),\n np.zeros((3, 15, 8, 8)),\n]\n\nTEST_CASE_2 = [\n {\"spatial_size\": [15, 8, 8], \"method\": \"end\", \"mode\": \"constant\"},\n np.zeros((3, 8, 8, 4)),\n np.zeros((3, 15, 8, 8)),\n]\n\nTEST_CASE_3 = [\n {\"spatial_size\": [15, 4, -1], \"method\": \"symmetric\", \"mode\": \"constant\"},\n np.zeros((3, 8, 8, 4)),\n np.zeros((3, 15, 8, 4)),\n]\n\n\nclass TestSpatialPad(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_pad_shape(self, input_param, input_data, expected_val):\n padder = SpatialPad(**input_param)\n result = padder(input_data)\n np.testing.assert_allclose(result.shape, expected_val.shape)\n result = padder(input_data, mode=input_param[\"mode\"])\n np.testing.assert_allclose(result.shape, expected_val.shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spatial_padd.py_unittest_": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_spatial_padd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_spatial_padd.py", "file_name": "test_spatial_padd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 54, "span_ids": ["impl:9", "TestSpatialPadd.test_pad_shape", "TestSpatialPadd", "docstring"], "tokens": 418}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import SpatialPadd\n\nTEST_CASE_1 = [\n {\"keys\": [\"img\"], \"spatial_size\": [15, 8, 8], \"method\": \"symmetric\", \"mode\": \"constant\"},\n {\"img\": np.zeros((3, 8, 8, 4))},\n np.zeros((3, 15, 8, 8)),\n]\n\nTEST_CASE_2 = [\n {\"keys\": [\"img\"], \"spatial_size\": [15, 8, 8], \"method\": \"end\", \"mode\": \"constant\"},\n {\"img\": np.zeros((3, 8, 8, 4))},\n np.zeros((3, 15, 8, 8)),\n]\n\nTEST_CASE_3 = [\n {\"keys\": [\"img\"], \"spatial_size\": [15, 8, 8], \"method\": \"end\", \"mode\": {\"constant\"}},\n {\"img\": np.zeros((3, 8, 8, 4))},\n np.zeros((3, 15, 8, 8)),\n]\n\nTEST_CASE_4 = [\n {\"keys\": [\"img\"], \"spatial_size\": [15, 8, -1], \"method\": \"end\", \"mode\": {\"constant\"}},\n {\"img\": np.zeros((3, 8, 4, 4))},\n np.zeros((3, 15, 8, 4)),\n]\n\n\nclass TestSpatialPadd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4])\n def test_pad_shape(self, input_param, input_data, expected_val):\n padder = SpatialPadd(**input_param)\n result = padder(input_data)\n np.testing.assert_allclose(result[\"img\"].shape, expected_val.shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_split_channel.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_split_channel.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_split_channel.py", "file_name": "test_split_channel.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 43, "span_ids": ["TestSplitChannel", "TestSplitChannel.test_shape", "impl:13", "docstring"], "tokens": 362}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import SplitChannel\n\nTEST_CASE_1 = [{\"channel_dim\": None}, torch.randint(0, 2, size=(4, 3, 3, 4)), (4, 1, 3, 4)]\n\nTEST_CASE_2 = [{\"channel_dim\": 1}, torch.randint(0, 2, size=(4, 3, 3, 4)), (4, 1, 3, 4)]\n\nTEST_CASE_3 = [{\"channel_dim\": None}, np.random.randint(2, size=(3, 
3, 4)), (1, 3, 4)]\n\nTEST_CASE_4 = [{\"channel_dim\": 0}, np.random.randint(2, size=(3, 3, 4)), (1, 3, 4)]\n\nTEST_CASE_5 = [{\"channel_dim\": 2}, np.random.randint(2, size=(3, 2, 4)), (3, 2, 1)]\n\nTEST_CASE_6 = [{\"channel_dim\": -1}, np.random.randint(2, size=(3, 2, 4)), (3, 2, 1)]\n\n\nclass TestSplitChannel(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5, TEST_CASE_6])\n def test_shape(self, input_param, test_data, expected_shape):\n result = SplitChannel(**input_param)(test_data)\n for data in result:\n self.assertTupleEqual(data.shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_split_channeld.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_split_channeld.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_split_channeld.py", "file_name": "test_split_channeld.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 74, "span_ids": ["TestSplitChanneld.test_shape", "impl:11", "docstring", "TestSplitChanneld", "impl:15"], "tokens": 639}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import SplitChanneld\n\nTEST_CASE_1 = [\n {\"keys\": \"pred\", \"output_postfixes\": [\"cls1\", \"cls2\", \"cls3\"], \"channel_dim\": None},\n {\"pred\": torch.randint(0, 2, size=(4, 3, 3, 4))},\n (4, 1, 3, 4),\n]\n\nTEST_CASE_2 = [\n {\"keys\": \"pred\", \"output_postfixes\": [\"cls1\", \"cls2\", \"cls3\"], \"channel_dim\": 1},\n {\"pred\": torch.randint(0, 2, size=(4, 3, 3, 4))},\n (4, 1, 3, 4),\n]\n\nTEST_CASE_3 = [\n {\"keys\": \"pred\", \"output_postfixes\": [\"cls1\", \"cls2\", \"cls3\"], \"channel_dim\": None},\n {\"pred\": np.random.randint(2, size=(3, 3, 4))},\n (1, 3, 4),\n]\n\nTEST_CASE_4 = [\n {\"keys\": \"pred\", \"output_postfixes\": [\"cls1\", \"cls2\", \"cls3\"], \"channel_dim\": 0},\n {\"pred\": np.random.randint(2, size=(3, 3, 4))},\n (1, 3, 4),\n]\n\nTEST_CASE_5 = [\n {\"keys\": \"pred\", \"output_postfixes\": [\"cls1\", \"cls2\", \"cls3\", \"cls4\"], \"channel_dim\": 2},\n {\"pred\": np.random.randint(2, size=(3, 2, 4))},\n (3, 2, 1),\n]\n\nTEST_CASE_6 = [\n {\"keys\": \"pred\", \"output_postfixes\": [\"cls1\", \"cls2\", \"cls3\", \"cls4\"], \"channel_dim\": -1},\n {\"pred\": np.random.randint(2, size=(3, 2, 4))},\n (3, 2, 1),\n]\n\nTEST_CASE_7 = [\n {\"keys\": \"pred\", \"channel_dim\": 1},\n {\"pred\": np.random.randint(2, size=(3, 2, 4))},\n (3, 1, 4),\n]\n\n\nclass TestSplitChanneld(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5, TEST_CASE_6, TEST_CASE_7])\n def test_shape(self, input_param, test_data, expected_shape):\n result = SplitChanneld(**input_param)(test_data)\n for k, v in result.items():\n if 
\"_\" in k:\n self.assertTupleEqual(v.shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_squeezedim.py_TestSqueezeDim_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_squeezedim.py_TestSqueezeDim_", "embedding": null, "metadata": {"file_path": "tests/test_squeezedim.py", "file_name": "test_squeezedim.py", "file_type": "text/x-python", "category": "test", "start_line": 35, "end_line": 49, "span_ids": ["impl:15", "TestSqueezeDim.test_invalid_inputs", "TestSqueezeDim.test_shape", "TestSqueezeDim"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSqueezeDim(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_4_PT])\n def test_shape(self, input_param, test_data, expected_shape):\n result = SqueezeDim(**input_param)(test_data)\n self.assertTupleEqual(result.shape, expected_shape)\n\n @parameterized.expand([TEST_CASE_5, TEST_CASE_6])\n def test_invalid_inputs(self, exception, input_param, test_data):\n with self.assertRaises(exception):\n SqueezeDim(**input_param)(test_data)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_squeezedimd.py_unittest_TEST_CASE_6._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_squeezedimd.py_unittest_TEST_CASE_6._", "embedding": null, "metadata": {"file_path": "tests/test_squeezedimd.py", "file_name": "test_squeezedimd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 60, "span_ids": ["impl:11", "docstring"], "tokens": 582}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import SqueezeDimd\n\nTEST_CASE_1 = [\n {\"keys\": [\"img\", \"seg\"], \"dim\": None},\n {\"img\": np.random.rand(1, 2, 1, 3), \"seg\": np.random.randint(0, 2, size=[1, 2, 1, 3])},\n (2, 3),\n]\n\nTEST_CASE_2 = [\n {\"keys\": [\"img\", \"seg\"], \"dim\": 2},\n {\"img\": np.random.rand(1, 2, 1, 8, 16), \"seg\": np.random.randint(0, 2, size=[1, 2, 1, 8, 16])},\n (1, 2, 8, 16),\n]\n\nTEST_CASE_3 = [\n {\"keys\": [\"img\", \"seg\"], \"dim\": -1},\n {\"img\": np.random.rand(1, 1, 16, 8, 1), \"seg\": np.random.randint(0, 2, size=[1, 
1, 16, 8, 1])},\n (1, 1, 16, 8),\n]\n\nTEST_CASE_4 = [\n {\"keys\": [\"img\", \"seg\"]},\n {\"img\": np.random.rand(1, 2, 1, 3), \"seg\": np.random.randint(0, 2, size=[1, 2, 1, 3])},\n (2, 1, 3),\n]\n\nTEST_CASE_4_PT = [\n {\"keys\": [\"img\", \"seg\"], \"dim\": 0},\n {\"img\": torch.rand(1, 2, 1, 3), \"seg\": torch.randint(0, 2, size=[1, 2, 1, 3])},\n (2, 1, 3),\n]\n\nTEST_CASE_5 = [\n ValueError,\n {\"keys\": [\"img\", \"seg\"], \"dim\": -2},\n {\"img\": np.random.rand(1, 1, 16, 8, 1), \"seg\": np.random.randint(0, 2, size=[1, 1, 16, 8, 1])},\n]\n\nTEST_CASE_6 = [\n TypeError,\n {\"keys\": [\"img\", \"seg\"], \"dim\": 0.5},\n {\"img\": np.random.rand(1, 1, 16, 8, 1), \"seg\": np.random.randint(0, 2, size=[1, 1, 16, 8, 1])},\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_squeezedimd.py_TestSqueezeDim_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_squeezedimd.py_TestSqueezeDim_", "embedding": null, "metadata": {"file_path": "tests/test_squeezedimd.py", "file_name": "test_squeezedimd.py", "file_type": "text/x-python", "category": "test", "start_line": 63, "end_line": 78, "span_ids": ["impl:15", "TestSqueezeDim.test_invalid_inputs", "TestSqueezeDim.test_shape", "TestSqueezeDim"], "tokens": 158}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSqueezeDim(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_4_PT])\n def test_shape(self, input_param, test_data, expected_shape):\n result = SqueezeDimd(**input_param)(test_data)\n self.assertTupleEqual(result[\"img\"].shape, expected_shape)\n self.assertTupleEqual(result[\"seg\"].shape, expected_shape)\n\n @parameterized.expand([TEST_CASE_5, TEST_CASE_6])\n def test_invalid_inputs(self, exception, input_param, test_data):\n with self.assertRaises(exception):\n SqueezeDimd(**input_param)(test_data)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_threshold_intensity.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_threshold_intensity.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_threshold_intensity.py", "file_name": "test_threshold_intensity.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 36, "span_ids": ["TestThresholdIntensity.test_value", "TestThresholdIntensity", "impl:7", "docstring"], "tokens": 267}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import ThresholdIntensity\n\nTEST_CASE_1 = [{\"threshold\": 5, \"above\": True, \"cval\": 0}, (0, 0, 0, 0, 0, 0, 6, 7, 8, 9)]\n\nTEST_CASE_2 = [{\"threshold\": 5, \"above\": False, \"cval\": 0}, (0, 1, 2, 3, 4, 0, 0, 0, 0, 0)]\n\nTEST_CASE_3 = [{\"threshold\": 5, \"above\": True, \"cval\": 5}, (5, 5, 5, 5, 5, 5, 6, 7, 8, 9)]\n\n\nclass TestThresholdIntensity(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_value(self, input_param, expected_value):\n test_data = np.arange(10)\n result = ThresholdIntensity(**input_param)(test_data)\n np.testing.assert_allclose(result, expected_value)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_threshold_intensityd.py_unittest_TEST_CASE_3._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_threshold_intensityd.py_unittest_TEST_CASE_3._", "embedding": null, "metadata": {"file_path": "tests/test_threshold_intensityd.py", "file_name": "test_threshold_intensityd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 32, "span_ids": ["docstring"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import ThresholdIntensityd\n\nTEST_CASE_1 = [\n {\"keys\": [\"image\", \"label\", \"extra\"], \"threshold\": 5, \"above\": True, \"cval\": 0},\n (0, 0, 0, 0, 0, 0, 6, 7, 8, 9),\n]\n\nTEST_CASE_2 = [\n {\"keys\": [\"image\", \"label\", \"extra\"], \"threshold\": 5, \"above\": False, \"cval\": 0},\n (0, 1, 2, 3, 4, 0, 0, 0, 0, 0),\n]\n\nTEST_CASE_3 = [\n {\"keys\": [\"image\", \"label\", \"extra\"], \"threshold\": 5, \"above\": True, \"cval\": 5},\n (5, 5, 5, 5, 5, 5, 6, 7, 8, 9),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_threshold_intensityd.py_TestThresholdIntensityd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_threshold_intensityd.py_TestThresholdIntensityd_", "embedding": null, "metadata": {"file_path": "tests/test_threshold_intensityd.py", "file_name": "test_threshold_intensityd.py", "file_type": "text/x-python", "category": "test", "start_line": 33, "end_line": 45, "span_ids": ["TestThresholdIntensityd", "TestThresholdIntensityd.test_value", "impl:7"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", 
"file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestThresholdIntensityd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_value(self, input_param, expected_value):\n test_data = {\"image\": np.arange(10), \"label\": np.arange(10), \"extra\": np.arange(10)}\n result = ThresholdIntensityd(**input_param)(test_data)\n np.testing.assert_allclose(result[\"image\"], expected_value)\n np.testing.assert_allclose(result[\"label\"], expected_value)\n np.testing.assert_allclose(result[\"extra\"], expected_value)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_numpy.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_numpy.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_to_numpy.py", "file_name": "test_to_numpy.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 50, "span_ids": ["TestToNumpy", "impl", "TestToNumpy.test_list_tuple", "TestToNumpy.test_tensor_input", "TestToNumpy.test_numpy_input", "docstring"], "tokens": 301}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\n\nfrom monai.transforms import ToNumpy\n\n\nclass TestToNumpy(unittest.TestCase):\n def test_numpy_input(self):\n test_data = np.array([[1, 2], [3, 4]])\n test_data = np.rot90(test_data)\n self.assertFalse(test_data.flags[\"C_CONTIGUOUS\"])\n result = ToNumpy()(test_data)\n self.assertTrue(isinstance(result, np.ndarray))\n self.assertTrue(result.flags[\"C_CONTIGUOUS\"])\n np.testing.assert_allclose(result, test_data)\n\n def test_tensor_input(self):\n test_data = torch.tensor([[1, 2], [3, 4]])\n test_data = test_data.rot90()\n self.assertFalse(test_data.is_contiguous())\n result = ToNumpy()(test_data)\n self.assertTrue(isinstance(result, np.ndarray))\n self.assertTrue(result.flags[\"C_CONTIGUOUS\"])\n np.testing.assert_allclose(result, test_data.numpy())\n\n def test_list_tuple(self):\n test_data = [[1, 2], [3, 4]]\n result = ToNumpy()(test_data)\n np.testing.assert_allclose(result, np.asarray(test_data))\n test_data = ((1, 2), (3, 4))\n result = ToNumpy()(test_data)\n np.testing.assert_allclose(result, np.asarray(test_data))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_numpyd.py_unittest_": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_numpyd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_to_numpyd.py", "file_name": "test_to_numpyd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 42, "span_ids": ["impl", "TestToNumpyd.test_tensor_input", "TestToNumpyd.test_numpy_input", "TestToNumpyd", "docstring"], "tokens": 238}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\n\nfrom monai.transforms import ToNumpyd\n\n\nclass TestToNumpyd(unittest.TestCase):\n def test_numpy_input(self):\n test_data = np.array([[1, 2], [3, 4]])\n test_data = np.rot90(test_data)\n self.assertFalse(test_data.flags[\"C_CONTIGUOUS\"])\n result = ToNumpyd(keys=\"img\")({\"img\": test_data})[\"img\"]\n self.assertTrue(isinstance(result, np.ndarray))\n self.assertTrue(result.flags[\"C_CONTIGUOUS\"])\n np.testing.assert_allclose(result, test_data)\n\n def test_tensor_input(self):\n test_data = torch.tensor([[1, 2], [3, 4]])\n test_data = test_data.rot90()\n self.assertFalse(test_data.is_contiguous())\n result = ToNumpyd(keys=\"img\")({\"img\": test_data})[\"img\"]\n self.assertTrue(isinstance(result, np.ndarray))\n self.assertTrue(result.flags[\"C_CONTIGUOUS\"])\n np.testing.assert_allclose(result, test_data.numpy())\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_onehot.py_unittest_TEST_CASE_4._no_channel_0D_batch": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_onehot.py_unittest_TEST_CASE_4._no_channel_0D_batch", "embedding": null, "metadata": {"file_path": "tests/test_to_onehot.py", "file_name": "test_to_onehot.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 41, "span_ids": ["docstring"], "tokens": 437}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks import one_hot\n\nTEST_CASE_1 = [ # single channel 2D, batch 3, shape (2, 1, 2, 2)\n {\"labels\": torch.tensor([[[[0, 1], [1, 2]]], [[[2, 1], [1, 0]]]]), \"num_classes\": 3},\n (2, 3, 2, 2),\n]\n\nTEST_CASE_2 = [ # single channel 1D, batch 2, shape (2, 1, 4)\n {\"labels\": torch.tensor([[[1, 2, 2, 0]], [[2, 1, 0, 1]]]), \"num_classes\": 3},\n (2, 3, 4),\n np.array([[[0, 0, 0, 1], [1, 0, 0, 0], [0, 1, 1, 0]], [[0, 0, 1, 0], [0, 1, 0, 1], [1, 0, 0, 0]]]),\n]\n\nTEST_CASE_3 = [ # single channel 0D, batch 2, shape (2, 1)\n {\"labels\": torch.tensor([[1.0], [2.0]]), \"num_classes\": 3},\n (2, 3),\n 
np.array([[0, 1, 0], [0, 0, 1]]),\n]\n\nTEST_CASE_4 = [ # no channel 0D, batch 3, shape (3)\n {\"labels\": torch.tensor([1, 2, 0]), \"num_classes\": 3, \"dtype\": torch.long},\n (3, 3),\n np.array([[0, 1, 0], [0, 0, 1], [1, 0, 0]]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_onehot.py_TestToOneHot_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_to_onehot.py_TestToOneHot_", "embedding": null, "metadata": {"file_path": "tests/test_to_onehot.py", "file_name": "test_to_onehot.py", "file_type": "text/x-python", "category": "test", "start_line": 44, "end_line": 61, "span_ids": ["impl:9", "TestToOneHot.test_shape", "TestToOneHot"], "tokens": 140}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestToOneHot(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4])\n def test_shape(self, input_data, expected_shape, expected_result=None):\n result = one_hot(**input_data)\n self.assertEqual(result.shape, expected_shape)\n if expected_result is not None:\n self.assertTrue(np.allclose(expected_result, result.numpy()))\n\n if \"dtype\" in input_data:\n self.assertEqual(result.dtype, input_data[\"dtype\"])\n else:\n # by default, expecting float type\n self.assertEqual(result.dtype, torch.float)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_tversky_loss.py_TestTverskyLoss_TestTverskyLoss.test_ill_shape.None_2.TverskyLoss_reduction_Non": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_tversky_loss.py_TestTverskyLoss_TestTverskyLoss.test_ill_shape.None_2.TverskyLoss_reduction_Non", "embedding": null, "metadata": {"file_path": "tests/test_tversky_loss.py", "file_name": "test_tversky_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 105, "end_line": 120, "span_ids": ["TestTverskyLoss.test_ill_shape", "TestTverskyLoss", "TestTverskyLoss.test_shape"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTverskyLoss(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_shape(self, input_param, input_data, expected_val):\n result = TverskyLoss(**input_param).forward(**input_data)\n np.testing.assert_allclose(result.detach().cpu().numpy(), expected_val, rtol=1e-4)\n\n def test_ill_shape(self):\n loss = TverskyLoss()\n with 
self.assertRaisesRegex(AssertionError, \"\"):\n loss.forward(torch.ones((2, 2, 3)), torch.ones((4, 5, 6)))\n chn_input = torch.ones((1, 1, 3))\n chn_target = torch.ones((1, 1, 3))\n with self.assertRaisesRegex(ValueError, \"\"):\n TverskyLoss(reduction=\"unknown\")(chn_input, chn_target)\n with self.assertRaisesRegex(ValueError, \"\"):\n TverskyLoss(reduction=None)(chn_input, chn_target)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_tversky_loss.py_TestTverskyLoss.test_input_warnings_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_tversky_loss.py_TestTverskyLoss.test_input_warnings_", "embedding": null, "metadata": {"file_path": "tests/test_tversky_loss.py", "file_name": "test_tversky_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 174, "end_line": 196, "span_ids": ["impl:3", "TestTverskyLoss.test_input_warnings", "TestTverskyLoss.test_script"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestTverskyLoss(unittest.TestCase):\n\n def test_input_warnings(self):\n chn_input = torch.ones((1, 1, 3))\n chn_target = torch.ones((1, 1, 3))\n with self.assertWarns(Warning):\n loss = TverskyLoss(include_background=False)\n loss.forward(chn_input, chn_target)\n with self.assertWarns(Warning):\n loss = TverskyLoss(softmax=True)\n loss.forward(chn_input, chn_target)\n with self.assertWarns(Warning):\n loss = TverskyLoss(to_onehot_y=True)\n loss.forward(chn_input, chn_target)\n\n @SkipIfBeforePyTorchVersion((1, 7, 0))\n def test_script(self):\n loss = TverskyLoss()\n test_input = torch.ones(2, 1, 8, 8)\n test_script_save(loss, test_input, test_input)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom.py_unittest_INVALID_CASES._None_None_bilinear": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom.py_unittest_INVALID_CASES._None_None_bilinear", "embedding": null, "metadata": {"file_path": "tests/test_zoom.py", "file_name": "test_zoom.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 23, "span_ids": ["docstring"], "tokens": 112}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\nfrom scipy.ndimage import zoom as zoom_scipy\n\nfrom monai.transforms import Zoom\nfrom 
tests.utils import NumpyImageTestCase2D\n\nVALID_CASES = [(1.5, \"nearest\"), (1.5, \"nearest\"), (0.8, \"bilinear\"), (0.8, \"area\")]\n\nINVALID_CASES = [((None, None), \"bilinear\", TypeError), ((0.9, 0.9), \"s\", ValueError)]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom.py_TestZoom_TestZoom.test_correct_results.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom.py_TestZoom_TestZoom.test_correct_results.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_zoom.py", "file_name": "test_zoom.py", "file_type": "text/x-python", "category": "test", "start_line": 26, "end_line": 38, "span_ids": ["TestZoom", "TestZoom.test_correct_results"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestZoom(NumpyImageTestCase2D):\n @parameterized.expand(VALID_CASES)\n def test_correct_results(self, zoom, mode):\n zoom_fn = Zoom(zoom=zoom, mode=mode, keep_size=False)\n zoomed = zoom_fn(self.imt[0])\n _order = 0\n if mode.endswith(\"linear\"):\n _order = 1\n expected = []\n for channel in self.imt[0]:\n expected.append(zoom_scipy(channel, zoom=zoom, mode=\"nearest\", order=_order, prefilter=False))\n expected = np.stack(expected).astype(np.float32)\n np.testing.assert_allclose(zoomed, expected, atol=1.0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom_affine.py_unittest_VALID_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom_affine.py_unittest_VALID_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_zoom_affine.py", "file_name": "test_zoom_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 47, "span_ids": ["docstring"], "tokens": 643}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport nibabel as nib\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.data.utils import zoom_affine\n\nVALID_CASES = [\n (\n np.array([[2, 1, 4], [-1, -3, 5], [0, 0, 1]]),\n (10, 20, 30),\n np.array([[8.94427191, -8.94427191, 0], [-4.47213595, -17.88854382, 0], [0.0, 0.0, 1.0]]),\n ),\n (\n np.array([[1, 0, 0, 4], [0, 2, 0, 5], [0, 0, 3, 6], [0, 0, 0, 1]]),\n (10, 20, 30),\n np.array([[10, 0, 0, 0], [0, 20, 0, 0], [0, 0, 30, 0], [0, 0, 0, 1]]),\n ),\n (\n np.array([[1, 0, 0, 4], [0, 2, 0, 5], [0, 0, 
3, 6], [0, 0, 0, 1]]),\n (10, 20),\n np.array([[10, 0, 0, 0], [0, 20, 0, 0], [0, 0, 3, 0], [0, 0, 0, 1]]),\n ),\n (\n np.array([[1, 0, 0, 4], [0, 2, 0, 5], [0, 0, 3, 6], [0, 0, 0, 1]]),\n (10,),\n np.array([[10, 0, 0, 0], [0, 2, 0, 0], [0, 0, 3, 0], [0, 0, 0, 1]]),\n ),\n (\n [[1, 0, 10], [0, 1, 20], [0, 0, 1]]\n @ ([[0, -1, 0], [1, 0, 0], [0, 0, 1]] @ np.array([[2, 0.3, 0], [0, 3, 0], [0, 0, 1]])),\n (4, 5, 6),\n ([[0, -1, 0], [1, 0, 0], [0, 0, 1]] @ np.array([[4, 0, 0], [0, 5, 0], [0, 0, 1]])),\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom_affine.py_DIAGONAL_CASES_DIAGONAL_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom_affine.py_DIAGONAL_CASES_DIAGONAL_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_zoom_affine.py", "file_name": "test_zoom_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 49, "end_line": 61, "span_ids": ["impl:3"], "tokens": 291}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "DIAGONAL_CASES = [\n (\n np.array([[-1, 0, 0, 4], [0, 2, 0, 5], [0, 0, 3, 6], [0, 0, 0, 1]]),\n (10, 20, 30),\n np.array([[10, 0, 0, 0], [0, 20, 0, 0], [0, 0, 30, 0], [0, 0, 0, 1]]),\n ),\n (np.array([[2, 1, 4], [-1, -3, 5], [0, 0, 1]]), (10, 20, 30), np.array([[10, 0, 0], [0, 20, 0], [0.0, 0.0, 1.0]])),\n ( # test default scale from affine\n np.array([[2, 1, 4], [-1, -3, 5], [0, 0, 1]]),\n (10,),\n np.array([[10, 0, 0], [0, 3.162278, 0], [0.0, 0.0, 1.0]]),\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom_affine.py_TestZoomAffine_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom_affine.py_TestZoomAffine_", "embedding": null, "metadata": {"file_path": "tests/test_zoom_affine.py", "file_name": "test_zoom_affine.py", "file_type": "text/x-python", "category": "test", "start_line": 64, "end_line": 81, "span_ids": ["TestZoomAffine.test_diagonal", "TestZoomAffine.test_correct", "impl:5", "TestZoomAffine"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestZoomAffine(unittest.TestCase):\n @parameterized.expand(VALID_CASES)\n def test_correct(self, affine, scale, expected):\n output = zoom_affine(affine, scale, diagonal=False)\n ornt_affine = nib.orientations.ornt2axcodes(nib.orientations.io_orientation(output))\n ornt_output = 
nib.orientations.ornt2axcodes(nib.orientations.io_orientation(affine))\n np.testing.assert_array_equal(ornt_affine, ornt_output)\n np.testing.assert_allclose(output, expected, rtol=1e-6, atol=1e-6)\n\n @parameterized.expand(DIAGONAL_CASES)\n def test_diagonal(self, affine, scale, expected):\n output = zoom_affine(affine, scale, diagonal=True)\n np.testing.assert_allclose(output, expected, rtol=1e-6, atol=1e-6)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoomd.py_unittest_INVALID_CASES._no_zoom_None_bilin": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoomd.py_unittest_INVALID_CASES._no_zoom_None_bilin", "embedding": null, "metadata": {"file_path": "tests/test_zoomd.py", "file_name": "test_zoomd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 23, "span_ids": ["docstring"], "tokens": 111}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\nfrom scipy.ndimage import zoom as zoom_scipy\n\nfrom monai.transforms import Zoomd\nfrom tests.utils import NumpyImageTestCase2D\n\nVALID_CASES = [(1.5, \"nearest\", False), (0.3, \"bilinear\", False), (0.8, \"bilinear\", False)]\n\nINVALID_CASES = [(\"no_zoom\", None, \"bilinear\", TypeError), (\"invalid_order\", 0.9, \"s\", ValueError)]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoomd.py_TestZoomd_TestZoomd.test_correct_results.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoomd.py_TestZoomd_TestZoomd.test_correct_results.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_zoomd.py", "file_name": "test_zoomd.py", "file_type": "text/x-python", "category": "test", "start_line": 26, "end_line": 44, "span_ids": ["TestZoomd.test_correct_results", "TestZoomd"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestZoomd(NumpyImageTestCase2D):\n @parameterized.expand(VALID_CASES)\n def test_correct_results(self, zoom, mode, keep_size):\n key = \"img\"\n zoom_fn = Zoomd(\n key,\n zoom=zoom,\n mode=mode,\n keep_size=keep_size,\n )\n zoomed = zoom_fn({key: self.imt[0]})\n _order = 0\n if mode.endswith(\"linear\"):\n _order = 1\n expected = []\n for channel in self.imt[0]:\n 
expected.append(zoom_scipy(channel, zoom=zoom, mode=\"nearest\", order=_order, prefilter=False))\n expected = np.stack(expected).astype(np.float32)\n np.testing.assert_allclose(expected, zoomed[key], atol=1.0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoomd.py_TestZoomd.test_keep_size_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoomd.py_TestZoomd.test_keep_size_", "embedding": null, "metadata": {"file_path": "tests/test_zoomd.py", "file_name": "test_zoomd.py", "file_type": "text/x-python", "category": "test", "start_line": 42, "end_line": 62, "span_ids": ["TestZoomd.test_keep_size", "impl:5", "TestZoomd.test_invalid_inputs"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestZoomd(NumpyImageTestCase2D):\n\n def test_keep_size(self):\n key = \"img\"\n zoom_fn = Zoomd(key, zoom=0.6, keep_size=True)\n zoomed = zoom_fn({key: self.imt[0]})\n self.assertTrue(np.array_equal(zoomed[key].shape, self.imt.shape[1:]))\n\n zoom_fn = Zoomd(key, zoom=1.3, keep_size=True)\n zoomed = zoom_fn({key: self.imt[0]})\n self.assertTrue(np.array_equal(zoomed[key].shape, self.imt.shape[1:]))\n\n @parameterized.expand(INVALID_CASES)\n def test_invalid_inputs(self, _, zoom, mode, raises):\n key = \"img\"\n with self.assertRaises(raises):\n zoom_fn = Zoomd(key, zoom=zoom, mode=mode)\n zoom_fn({key: self.imt[0]})\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_make_nifti_image_make_nifti_image.return.image_name": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_make_nifti_image_make_nifti_image.return.image_name", "embedding": null, "metadata": {"file_path": "tests/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 33, "end_line": 45, "span_ids": ["make_nifti_image"], "tokens": 110}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def make_nifti_image(array, affine=None):\n \"\"\"\n Create a temporary nifti image on the disk and return the image name.\n User is responsible for deleting the temporary file when done with it.\n \"\"\"\n if affine is None:\n affine = np.eye(4)\n test_image = nib.Nifti1Image(array, affine)\n\n temp_f, image_name = tempfile.mkstemp(suffix=\".nii.gz\")\n nib.save(test_image, image_name)\n os.close(temp_f)\n return 
image_name", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_NumpyImageTestCase2D_TorchImageTestCase2D.setUp.self.segn.torch_tensor_self_segn_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_NumpyImageTestCase2D_TorchImageTestCase2D.setUp.self.segn.torch_tensor_self_segn_", "embedding": null, "metadata": {"file_path": "tests/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 48, "end_line": 67, "span_ids": ["NumpyImageTestCase2D", "TorchImageTestCase2D.setUp", "TorchImageTestCase2D", "NumpyImageTestCase2D.setUp"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class NumpyImageTestCase2D(unittest.TestCase):\n im_shape = (128, 64)\n input_channels = 1\n output_channels = 4\n num_classes = 3\n\n def setUp(self):\n im, msk = create_test_image_2d(self.im_shape[0], self.im_shape[1], 4, 20, 0, self.num_classes)\n\n self.imt = im[None, None]\n self.seg1 = (msk[None, None] > 0).astype(np.float32)\n self.segn = msk[None, None]\n\n\nclass TorchImageTestCase2D(NumpyImageTestCase2D):\n def setUp(self):\n NumpyImageTestCase2D.setUp(self)\n self.imt = torch.tensor(self.imt)\n self.seg1 = torch.tensor(self.seg1)\n self.segn = torch.tensor(self.segn)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_query_memory_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/utils.py_query_memory_", "embedding": null, "metadata": {"file_path": "tests/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 568, "end_line": 588, "span_ids": ["query_memory", "impl:7"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def query_memory(n=2):\n \"\"\"\n Find best n idle devices and return a string of device ids.\n \"\"\"\n bash_string = \"nvidia-smi --query-gpu=power.draw,temperature.gpu,memory.used --format=csv,noheader,nounits\"\n\n try:\n p1 = Popen(bash_string.split(), stdout=PIPE)\n output, error = p1.communicate()\n free_memory = [x.split(\",\") for x in output.decode(\"utf-8\").split(\"\\n\")[:-1]]\n free_memory = np.asarray(free_memory, dtype=float).T\n free_memory[1] += free_memory[0] # combine 0/1 column measures\n ids = np.lexsort(free_memory)[:n]\n except (FileNotFoundError, TypeError, IndexError):\n ids = range(n) if isinstance(n, int) else []\n 
return \",\".join([f\"{int(x)}\" for x in ids])\n\n\nif __name__ == \"__main__\":\n print(query_memory())", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_config_from_root_get_config_from_root.return.cfg": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_config_from_root_get_config_from_root.return.cfg", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 328, "end_line": 355, "span_ids": ["get_config_from_root"], "tokens": 293}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_config_from_root(root):\n \"\"\"Read the project setup.cfg file to determine Versioneer config.\"\"\"\n # This might raise EnvironmentError (if setup.cfg is missing), or\n # configparser.NoSectionError (if it lacks a [versioneer] section), or\n # configparser.NoOptionError (if it lacks \"VCS=\"). See the docstring at\n # the top of versioneer.py for instructions on writing your setup.cfg .\n setup_cfg = os.path.join(root, \"setup.cfg\")\n parser = configparser.ConfigParser()\n with open(setup_cfg, \"r\") as f:\n parser.read_file(f)\n VCS = parser.get(\"versioneer\", \"VCS\") # mandatory\n\n def get(parser, name):\n if parser.has_option(\"versioneer\", name):\n return parser.get(\"versioneer\", name)\n return None\n\n cfg = VersioneerConfig()\n cfg.VCS = VCS\n cfg.style = get(parser, \"style\") or \"\"\n cfg.versionfile_source = get(parser, \"versionfile_source\")\n cfg.versionfile_build = get(parser, \"versionfile_build\")\n cfg.tag_prefix = get(parser, \"tag_prefix\")\n if cfg.tag_prefix in (\"''\", '\"\"'):\n cfg.tag_prefix = \"\"\n cfg.parentdir_prefix = get(parser, \"parentdir_prefix\")\n cfg.verbose = get(parser, \"verbose\")\n return cfg", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_NotThisMethod_register_vcs_handler.return.decorate": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_NotThisMethod_register_vcs_handler.return.decorate", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 358, "end_line": 377, "span_ids": ["register_vcs_handler", "NotThisMethod", "impl"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
NotThisMethod(Exception):\n \"\"\"Exception raised if a method is not valid for the current scenario.\"\"\"\n\n\n# these dictionaries contain VCS-specific tools\nLONG_VERSION_PY = {}\nHANDLERS = {}\n\n\ndef register_vcs_handler(vcs, method): # decorator\n \"\"\"Create decorator to mark a method as the handler of a VCS.\"\"\"\n\n def decorate(f):\n \"\"\"Store f in HANDLERS[vcs][method].\"\"\"\n if vcs not in HANDLERS:\n HANDLERS[vcs] = {}\n HANDLERS[vcs][method] = f\n return f\n\n return decorate", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_run_command_run_command.return.stdout_p_returncode": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_run_command_run_command.return.stdout_p_returncode", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 380, "end_line": 410, "span_ids": ["run_command"], "tokens": 273}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):\n \"\"\"Call the given command(s).\"\"\"\n assert isinstance(commands, list)\n p = None\n for c in commands:\n try:\n dispcmd = str([c] + args)\n # remember shell=False, so use git.cmd on windows, not just git\n p = subprocess.Popen(\n [c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)\n )\n break\n except EnvironmentError:\n e = sys.exc_info()[1]\n if e.errno == errno.ENOENT:\n continue\n if verbose:\n print(\"unable to run %s\" % dispcmd)\n print(e)\n return None, None\n else:\n if verbose:\n print(\"unable to find command, tried %s\" % (commands,))\n return None, None\n stdout = p.communicate()[0].strip().decode()\n if p.returncode != 0:\n if verbose:\n print(\"unable to run %s (error)\" % dispcmd)\n print(\"stdout was %s\" % stdout)\n return None, p.returncode\n return stdout, p.returncode", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_git_get_keywords_git_get_keywords.return.keywords": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_git_get_keywords_git_get_keywords.return.keywords", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 948, "end_line": 974, "span_ids": ["git_get_keywords"], "tokens": 255}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@register_vcs_handler(\"git\", \"get_keywords\")\ndef git_get_keywords(versionfile_abs):\n \"\"\"Extract version information from the given file.\"\"\"\n # the code embedded in _version.py can just fetch the value of these\n # keywords. When used from setup.py, we don't want to import _version.py,\n # so we do it with a regexp instead. This function is not used from\n # _version.py.\n keywords = {}\n try:\n f = open(versionfile_abs, \"r\")\n for line in f.readlines():\n if line.strip().startswith(\"git_refnames =\"):\n mo = re.search(r'=\\s*\"(.*)\"', line)\n if mo:\n keywords[\"refnames\"] = mo.group(1)\n if line.strip().startswith(\"git_full =\"):\n mo = re.search(r'=\\s*\"(.*)\"', line)\n if mo:\n keywords[\"full\"] = mo.group(1)\n if line.strip().startswith(\"git_date =\"):\n mo = re.search(r'=\\s*\"(.*)\"', line)\n if mo:\n keywords[\"date\"] = mo.group(1)\n f.close()\n except EnvironmentError:\n pass\n return keywords", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_git_versions_from_keywords_git_versions_from_keywords.return._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_git_versions_from_keywords_git_versions_from_keywords.return._", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 972, "end_line": 1035, "span_ids": ["git_versions_from_keywords"], "tokens": 756}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@register_vcs_handler(\"git\", \"keywords\")\ndef git_versions_from_keywords(keywords, tag_prefix, verbose):\n \"\"\"Get version information from git keywords.\"\"\"\n if not keywords:\n raise NotThisMethod(\"no keywords at all, weird\")\n date = keywords.get(\"date\")\n if date is not None:\n # Use only the last line. Previous lines may contain GPG signature\n # information.\n date = date.splitlines()[-1]\n\n # git-2.2.0 added \"%cI\", which expands to an ISO-8601 -compliant\n # datestamp. However we prefer \"%ci\" (which expands to an \"ISO-8601\n # -like\" string, which we must then edit to make compliant), because\n # it's been around since git-1.5.3, and it's too difficult to\n # discover which version we're using, or to work around using an\n # older one.\n date = date.strip().replace(\" \", \"T\", 1).replace(\" \", \"\", 1)\n refnames = keywords[\"refnames\"].strip()\n if refnames.startswith(\"$Format\"):\n if verbose:\n print(\"keywords are unexpanded, not using\")\n raise NotThisMethod(\"unexpanded keywords, not a git-archive tarball\")\n refs = set([r.strip() for r in refnames.strip(\"()\").split(\",\")])\n # starting in git-1.8.3, tags are listed as \"tag: foo-1.0\" instead of\n # just \"foo-1.0\". 
If we see a \"tag: \" prefix, prefer those.\n TAG = \"tag: \"\n tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])\n if not tags:\n # Either we're using git < 1.8.3, or there really are no tags. We use\n # a heuristic: assume all version tags have a digit. The old git %d\n # expansion behaves like git log --decorate=short and strips out the\n # refs/heads/ and refs/tags/ prefixes that would let us distinguish\n # between branches and tags. By ignoring refnames without digits, we\n # filter out many common branch names like \"release\" and\n # \"stabilization\", as well as \"HEAD\" and \"master\".\n tags = set([r for r in refs if re.search(r\"\\d\", r)])\n if verbose:\n print(\"discarding '%s', no digits\" % \",\".join(refs - tags))\n if verbose:\n print(\"likely tags: %s\" % \",\".join(sorted(tags)))\n for ref in sorted(tags):\n # sorting will prefer e.g. \"2.0\" over \"2.0rc1\"\n if ref.startswith(tag_prefix):\n r = ref[len(tag_prefix) :]\n if verbose:\n print(\"picking %s\" % r)\n return {\n \"version\": r,\n \"full-revisionid\": keywords[\"full\"].strip(),\n \"dirty\": False,\n \"error\": None,\n \"date\": date,\n }\n # no suitable tags, so version is \"0+unknown\", but full hex is still there\n if verbose:\n print(\"no suitable tags, using unknown + full revision id\")\n return {\n \"version\": \"0+unknown\",\n \"full-revisionid\": keywords[\"full\"].strip(),\n \"dirty\": False,\n \"error\": \"no suitable tags\",\n \"date\": None,\n }", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_git_pieces_from_vcs_git_pieces_from_vcs.return.pieces": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_git_pieces_from_vcs_git_pieces_from_vcs.return.pieces", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1038, "end_line": 1124, "span_ids": ["git_pieces_from_vcs"], "tokens": 902}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@register_vcs_handler(\"git\", \"pieces_from_vcs\")\ndef git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):\n \"\"\"Get version from 'git describe' in the root of the source tree.\n\n This only gets called if the git-archive 'subst' keywords were *not*\n expanded, and _version.py hasn't already been rewritten with a short\n version string, meaning we're inside a checked out source tree.\n \"\"\"\n GITS = [\"git\"]\n if sys.platform == \"win32\":\n GITS = [\"git.cmd\", \"git.exe\"]\n\n out, rc = run_command(GITS, [\"rev-parse\", \"--git-dir\"], cwd=root, hide_stderr=True)\n if rc != 0:\n if verbose:\n print(\"Directory %s not under git control\" % root)\n raise NotThisMethod(\"'git rev-parse --git-dir' returned error\")\n\n # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]\n # if there isn't one, this yields HEX[-dirty] (no NUM)\n describe_out, rc = run_command(\n GITS, [\"describe\", 
\"--tags\", \"--dirty\", \"--always\", \"--long\", \"--match\", \"%s*\" % tag_prefix], cwd=root\n )\n # --long was added in git-1.5.5\n if describe_out is None:\n raise NotThisMethod(\"'git describe' failed\")\n describe_out = describe_out.strip()\n full_out, rc = run_command(GITS, [\"rev-parse\", \"HEAD\"], cwd=root)\n if full_out is None:\n raise NotThisMethod(\"'git rev-parse' failed\")\n full_out = full_out.strip()\n\n pieces = {}\n pieces[\"long\"] = full_out\n pieces[\"short\"] = full_out[:7] # maybe improved later\n pieces[\"error\"] = None\n\n # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]\n # TAG might have hyphens.\n git_describe = describe_out\n\n # look for -dirty suffix\n dirty = git_describe.endswith(\"-dirty\")\n pieces[\"dirty\"] = dirty\n if dirty:\n git_describe = git_describe[: git_describe.rindex(\"-dirty\")]\n\n # now we have TAG-NUM-gHEX or HEX\n\n if \"-\" in git_describe:\n # TAG-NUM-gHEX\n mo = re.search(r\"^(.+)-(\\d+)-g([0-9a-f]+)$\", git_describe)\n if not mo:\n # unparseable. Maybe git-describe is misbehaving?\n pieces[\"error\"] = \"unable to parse git-describe output: '%s'\" % describe_out\n return pieces\n\n # tag\n full_tag = mo.group(1)\n if not full_tag.startswith(tag_prefix):\n if verbose:\n fmt = \"tag '%s' doesn't start with prefix '%s'\"\n print(fmt % (full_tag, tag_prefix))\n pieces[\"error\"] = \"tag '%s' doesn't start with prefix '%s'\" % (full_tag, tag_prefix)\n return pieces\n pieces[\"closest-tag\"] = full_tag[len(tag_prefix) :]\n\n # distance: number of commits since tag\n pieces[\"distance\"] = int(mo.group(2))\n\n # commit: short hex revision ID\n pieces[\"short\"] = mo.group(3)\n\n else:\n # HEX: no tags\n pieces[\"closest-tag\"] = None\n count_out, rc = run_command(GITS, [\"rev-list\", \"HEAD\", \"--count\"], cwd=root)\n pieces[\"distance\"] = int(count_out) # total number of commits\n\n # commit date: see ISO-8601 comment in git_versions_from_keywords()\n date = run_command(GITS, [\"show\", \"-s\", \"--format=%ci\", \"HEAD\"], cwd=root)[0].strip()\n # Use only the last line. 
Previous lines may contain GPG signature\n # information.\n date = date.splitlines()[-1]\n pieces[\"date\"] = date.strip().replace(\" \", \"T\", 1).replace(\" \", \"\", 1)\n\n return pieces", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_do_vcs_install_do_vcs_install.run_command_GITS_add_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_do_vcs_install_do_vcs_install.run_command_GITS_add_", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1125, "end_line": 1160, "span_ids": ["do_vcs_install"], "tokens": 300}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def do_vcs_install(manifest_in, versionfile_source, ipy):\n \"\"\"Git-specific installation logic for Versioneer.\n\n For Git, this means creating/changing .gitattributes to mark _version.py\n for export-subst keyword substitution.\n \"\"\"\n GITS = [\"git\"]\n if sys.platform == \"win32\":\n GITS = [\"git.cmd\", \"git.exe\"]\n files = [manifest_in, versionfile_source]\n if ipy:\n files.append(ipy)\n try:\n me = __file__\n if me.endswith(\".pyc\") or me.endswith(\".pyo\"):\n me = os.path.splitext(me)[0] + \".py\"\n versioneer_file = os.path.relpath(me)\n except NameError:\n versioneer_file = \"versioneer.py\"\n files.append(versioneer_file)\n present = False\n try:\n f = open(\".gitattributes\", \"r\")\n for line in f.readlines():\n if line.strip().startswith(versionfile_source):\n if \"export-subst\" in line.strip().split()[1:]:\n present = True\n f.close()\n except EnvironmentError:\n pass\n if not present:\n f = open(\".gitattributes\", \"a+\")\n f.write(\"%s export-subst\\n\" % versionfile_source)\n f.close()\n files.append(\".gitattributes\")\n run_command(GITS, [\"add\", \"--\"] + files)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_versions_from_parentdir_versions_from_parentdir.raise_NotThisMethod_root": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_versions_from_parentdir_versions_from_parentdir.raise_NotThisMethod_root", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1163, "end_line": 1188, "span_ids": ["versions_from_parentdir"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "def versions_from_parentdir(parentdir_prefix, root, verbose):\n \"\"\"Try to determine the version from the parent directory name.\n\n Source tarballs conventionally unpack into a directory that includes both\n the project name and a version string. We will also support searching up\n two directory levels for an appropriately named parent directory\n \"\"\"\n rootdirs = []\n\n for i in range(3):\n dirname = os.path.basename(root)\n if dirname.startswith(parentdir_prefix):\n return {\n \"version\": dirname[len(parentdir_prefix) :],\n \"full-revisionid\": None,\n \"dirty\": False,\n \"error\": None,\n \"date\": None,\n }\n else:\n rootdirs.append(root)\n root = os.path.dirname(root) # up a level\n\n if verbose:\n print(\"Tried directories %s but none started with prefix %s\" % (str(rootdirs), parentdir_prefix))\n raise NotThisMethod(\"rootdir doesn't start with parentdir_prefix\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_SHORT_VERSION_PY_versions_from_file.return.json_loads_mo_group_1_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_SHORT_VERSION_PY_versions_from_file.return.json_loads_mo_group_1_", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1193, "end_line": 1223, "span_ids": ["versions_from_file", "impl:6"], "tokens": 236}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "SHORT_VERSION_PY = \"\"\"\n# This file was generated by 'versioneer.py' (0.19) from\n# revision-control system data, or from the parent directory name of an\n# unpacked source archive. 
Distribution tarballs contain a pre-generated copy\n# of this file.\n\nimport json\n\nversion_json = '''\n%s\n''' # END VERSION_JSON\n\n\ndef get_versions():\n return json.loads(version_json)\n\"\"\"\n\n\ndef versions_from_file(filename):\n \"\"\"Try to determine the version from _version.py if present.\"\"\"\n try:\n with open(filename) as f:\n contents = f.read()\n except EnvironmentError:\n raise NotThisMethod(\"unable to read _version.py\")\n mo = re.search(r\"version_json = '''\\n(.*)''' # END VERSION_JSON\", contents, re.M | re.S)\n if not mo:\n mo = re.search(r\"version_json = '''\\r\\n(.*)''' # END VERSION_JSON\", contents, re.M | re.S)\n if not mo:\n raise NotThisMethod(\"no version_json in _version.py\")\n return json.loads(mo.group(1))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_write_to_version_file_plus_or_dot.return._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_write_to_version_file_plus_or_dot.return._", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1224, "end_line": 1238, "span_ids": ["plus_or_dot", "write_to_version_file"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def write_to_version_file(filename, versions):\n \"\"\"Write the given version number to the given _version.py file.\"\"\"\n os.unlink(filename)\n contents = json.dumps(versions, sort_keys=True, indent=1, separators=(\",\", \": \"))\n with open(filename, \"w\") as f:\n f.write(SHORT_VERSION_PY % contents)\n\n print(\"set %s to '%s'\" % (filename, versions[\"version\"]))\n\n\ndef plus_or_dot(pieces):\n \"\"\"Return a + if we don't already have one, else return a .\"\"\"\n if \"+\" in pieces.get(\"closest-tag\", \"\"):\n return \".\"\n return \"+\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_pep440_post_render_pep440_post.return.rendered": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_pep440_post_render_pep440_post.return.rendered", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1281, "end_line": 1305, "span_ids": ["render_pep440_post"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": 
"def render_pep440_post(pieces):\n \"\"\"TAG[.postDISTANCE[.dev0]+gHEX] .\n\n The \".dev0\" means dirty. Note that .dev0 sorts backwards\n (a dirty tree will appear \"older\" than the corresponding clean one),\n but you shouldn't be releasing software with -dirty anyways.\n\n Exceptions:\n 1: no tags. 0.postDISTANCE[.dev0]\n \"\"\"\n if pieces[\"closest-tag\"]:\n rendered = pieces[\"closest-tag\"]\n if pieces[\"distance\"] or pieces[\"dirty\"]:\n rendered += \".post%d\" % pieces[\"distance\"]\n if pieces[\"dirty\"]:\n rendered += \".dev0\"\n rendered += plus_or_dot(pieces)\n rendered += \"g%s\" % pieces[\"short\"]\n else:\n # exception #1\n rendered = \"0.post%d\" % pieces[\"distance\"]\n if pieces[\"dirty\"]:\n rendered += \".dev0\"\n rendered += \"+g%s\" % pieces[\"short\"]\n return rendered", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_pep440_old_render_pep440_old.return.rendered": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_pep440_old_render_pep440_old.return.rendered", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1310, "end_line": 1329, "span_ids": ["render_pep440_old"], "tokens": 143}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def render_pep440_old(pieces):\n \"\"\"TAG[.postDISTANCE[.dev0]] .\n\n The \".dev0\" means dirty.\n\n Exceptions:\n 1: no tags. 
0.postDISTANCE[.dev0]\n \"\"\"\n if pieces[\"closest-tag\"]:\n rendered = pieces[\"closest-tag\"]\n if pieces[\"distance\"] or pieces[\"dirty\"]:\n rendered += \".post%d\" % pieces[\"distance\"]\n if pieces[\"dirty\"]:\n rendered += \".dev0\"\n else:\n # exception #1\n rendered = \"0.post%d\" % pieces[\"distance\"]\n if pieces[\"dirty\"]:\n rendered += \".dev0\"\n return rendered", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_git_describe_render_git_describe.return.rendered": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_git_describe_render_git_describe.return.rendered", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1330, "end_line": 1347, "span_ids": ["render_git_describe"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def render_git_describe(pieces):\n \"\"\"TAG[-DISTANCE-gHEX][-dirty].\n\n Like 'git describe --tags --dirty --always'.\n\n Exceptions:\n 1: no tags. HEX[-dirty] (note: no 'g' prefix)\n \"\"\"\n if pieces[\"closest-tag\"]:\n rendered = pieces[\"closest-tag\"]\n if pieces[\"distance\"]:\n rendered += \"-%d-g%s\" % (pieces[\"distance\"], pieces[\"short\"])\n else:\n # exception #1\n rendered = pieces[\"short\"]\n if pieces[\"dirty\"]:\n rendered += \"-dirty\"\n return rendered", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_git_describe_long_render_git_describe_long.return.rendered": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_git_describe_long_render_git_describe_long.return.rendered", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1350, "end_line": 1367, "span_ids": ["render_git_describe_long"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def render_git_describe_long(pieces):\n \"\"\"TAG-DISTANCE-gHEX[-dirty].\n\n Like 'git describe --tags --dirty --always -long'.\n The distance/hash is unconditional.\n\n Exceptions:\n 1: no tags. 
HEX[-dirty] (note: no 'g' prefix)\n \"\"\"\n if pieces[\"closest-tag\"]:\n rendered = pieces[\"closest-tag\"]\n rendered += \"-%d-g%s\" % (pieces[\"distance\"], pieces[\"short\"])\n else:\n # exception #1\n rendered = pieces[\"short\"]\n if pieces[\"dirty\"]:\n rendered += \"-dirty\"\n return rendered", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_VersioneerBadRootError._The_project_root_direc": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_render_VersioneerBadRootError._The_project_root_direc", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1370, "end_line": 1409, "span_ids": ["VersioneerBadRootError", "render"], "tokens": 295}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def render(pieces, style):\n \"\"\"Render the given version pieces into the requested style.\"\"\"\n if pieces[\"error\"]:\n return {\n \"version\": \"unknown\",\n \"full-revisionid\": pieces.get(\"long\"),\n \"dirty\": None,\n \"error\": pieces[\"error\"],\n \"date\": None,\n }\n\n if not style or style == \"default\":\n style = \"pep440\" # the default\n\n if style == \"pep440\":\n rendered = render_pep440(pieces)\n elif style == \"pep440-pre\":\n rendered = render_pep440_pre(pieces)\n elif style == \"pep440-post\":\n rendered = render_pep440_post(pieces)\n elif style == \"pep440-old\":\n rendered = render_pep440_old(pieces)\n elif style == \"git-describe\":\n rendered = render_git_describe(pieces)\n elif style == \"git-describe-long\":\n rendered = render_git_describe_long(pieces)\n else:\n raise ValueError(\"unknown style '%s'\" % style)\n\n return {\n \"version\": rendered,\n \"full-revisionid\": pieces[\"long\"],\n \"dirty\": pieces[\"dirty\"],\n \"error\": None,\n \"date\": pieces.get(\"date\"),\n }\n\n\nclass VersioneerBadRootError(Exception):\n \"\"\"The project root directory is unknown or missing key files.\"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_versions_get_versions.return._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_versions_get_versions.return._", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1412, "end_line": 1488, "span_ids": ["get_versions"], "tokens": 619}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", 
"file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_versions(verbose=False):\n \"\"\"Get the project version from whatever source is available.\n\n Returns dict with two keys: 'version' and 'full'.\n \"\"\"\n if \"versioneer\" in sys.modules:\n # see the discussion in cmdclass.py:get_cmdclass()\n del sys.modules[\"versioneer\"]\n\n root = get_root()\n cfg = get_config_from_root(root)\n\n assert cfg.VCS is not None, \"please set [versioneer]VCS= in setup.cfg\"\n handlers = HANDLERS.get(cfg.VCS)\n assert handlers, \"unrecognized VCS '%s'\" % cfg.VCS\n verbose = verbose or cfg.verbose\n assert cfg.versionfile_source is not None, \"please set versioneer.versionfile_source\"\n assert cfg.tag_prefix is not None, \"please set versioneer.tag_prefix\"\n\n versionfile_abs = os.path.join(root, cfg.versionfile_source)\n\n # extract version from first of: _version.py, VCS command (e.g. 'git\n # describe'), parentdir. This is meant to work for developers using a\n # source checkout, for users of a tarball created by 'setup.py sdist',\n # and for users of a tarball/zipball created by 'git archive' or github's\n # download-from-tag feature or the equivalent in other VCSes.\n\n get_keywords_f = handlers.get(\"get_keywords\")\n from_keywords_f = handlers.get(\"keywords\")\n if get_keywords_f and from_keywords_f:\n try:\n keywords = get_keywords_f(versionfile_abs)\n ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)\n if verbose:\n print(\"got version from expanded keyword %s\" % ver)\n return ver\n except NotThisMethod:\n pass\n\n try:\n ver = versions_from_file(versionfile_abs)\n if verbose:\n print(\"got version from file %s %s\" % (versionfile_abs, ver))\n return ver\n except NotThisMethod:\n pass\n\n from_vcs_f = handlers.get(\"pieces_from_vcs\")\n if from_vcs_f:\n try:\n pieces = from_vcs_f(cfg.tag_prefix, root, verbose)\n ver = render(pieces, cfg.style)\n if verbose:\n print(\"got version from VCS %s\" % ver)\n return ver\n except NotThisMethod:\n pass\n\n try:\n if cfg.parentdir_prefix:\n ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)\n if verbose:\n print(\"got version from parentdir %s\" % ver)\n return ver\n except NotThisMethod:\n pass\n\n if verbose:\n print(\"unable to compute version\")\n\n return {\n \"version\": \"0+unknown\",\n \"full-revisionid\": None,\n \"dirty\": None,\n \"error\": \"unable to compute version\",\n \"date\": None,\n }", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_version_get_cmdclass.from_distutils_core_impor": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_version_get_cmdclass.from_distutils_core_impor", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1493, "end_line": 1522, "span_ids": ["get_cmdclass", "get_version"], "tokens": 344}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "def get_version():\n \"\"\"Get the short version string for this project.\"\"\"\n return get_versions()[\"version\"]\n\n\ndef get_cmdclass(cmdclass=None):\n \"\"\"Get the custom setuptools/distutils subclasses used by Versioneer.\n\n If the package uses a different cmdclass (e.g. one from numpy), it\n should be provide as an argument.\n \"\"\"\n if \"versioneer\" in sys.modules:\n del sys.modules[\"versioneer\"]\n # this fixes the \"python setup.py develop\" case (also 'install' and\n # 'easy_install .'), in which subdependencies of the main project are\n # built (using setup.py bdist_egg) in the same python process. Assume\n # a main project A and a dependency B, which use different versions\n # of Versioneer. A's setup.py imports A's Versioneer, leaving it in\n # sys.modules by the time B's setup.py is executed, causing B to run\n # with the wrong versioneer. Setuptools wraps the sub-dep builds in a\n # sandbox that restores sys.modules to it's pre-build state, so the\n # parent is protected against the child's \"import versioneer\". By\n # removing ourselves from sys.modules here, before the child build\n # happens, we protect the child from the parent's versioneer too.\n # Also see https://github.com/python-versioneer/python-versioneer/issues/52\n\n cmds = {} if cmdclass is None else cmdclass.copy()\n\n # we add \"version\" to both distutils and setuptools\n from distutils.core import Command\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_cmdclass.cmd_version_get_cmdclass.cmd_version.run.if_vers_error_.print_error_s_vers": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_cmdclass.cmd_version_get_cmdclass.cmd_version.run.if_vers_error_.print_error_s_vers", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1524, "end_line": 1542, "span_ids": ["get_cmdclass"], "tokens": 155}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_cmdclass(cmdclass=None):\n # ... other code\n\n class cmd_version(Command):\n description = \"report generated version string\"\n user_options = []\n boolean_options = []\n\n def initialize_options(self):\n pass\n\n def finalize_options(self):\n pass\n\n def run(self):\n vers = get_versions(verbose=True)\n print(\"Version: %s\" % vers[\"version\"])\n print(\" full-revisionid: %s\" % vers.get(\"full-revisionid\"))\n print(\" dirty: %s\" % vers.get(\"dirty\"))\n print(\" date: %s\" % vers.get(\"date\"))\n if vers[\"error\"]:\n print(\" error: %s\" % vers[\"error\"])\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_cmdclass.cmd_sdist_get_cmdclass.return.cmds": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_get_cmdclass.cmd_sdist_get_cmdclass.return.cmds", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1683, "end_line": 1705, "span_ids": ["get_cmdclass"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_cmdclass(cmdclass=None):\n # ... other code\n\n class cmd_sdist(_sdist):\n def run(self):\n versions = get_versions()\n self._versioneer_generated_versions = versions\n # unless we update this, the command will keep using the old\n # version\n self.distribution.metadata.version = versions[\"version\"]\n return _sdist.run(self)\n\n def make_release_tree(self, base_dir, files):\n root = get_root()\n cfg = get_config_from_root(root)\n _sdist.make_release_tree(self, base_dir, files)\n # now locate _version.py in the new base_dir directory\n # (remembering that it may be a hardlink) and replace it with an\n # updated value\n target_versionfile = os.path.join(base_dir, cfg.versionfile_source)\n print(\"UPDATING %s\" % target_versionfile)\n write_to_version_file(target_versionfile, self._versioneer_generated_versions)\n\n cmds[\"sdist\"] = cmd_sdist\n\n return cmds", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_CONFIG_ERROR_INIT_PY_SNIPPET._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_CONFIG_ERROR_INIT_PY_SNIPPET._", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1708, "end_line": 1749, "span_ids": ["impl:8"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "CONFIG_ERROR = \"\"\"\nsetup.cfg is missing the necessary Versioneer configuration. 
You need\na section like:\n\n [versioneer]\n VCS = git\n style = pep440\n versionfile_source = src/myproject/_version.py\n versionfile_build = myproject/_version.py\n tag_prefix =\n parentdir_prefix = myproject-\n\nYou will also need to edit your setup.py to use the results:\n\n import versioneer\n setup(version=versioneer.get_version(),\n cmdclass=versioneer.get_cmdclass(), ...)\n\nPlease read the docstring in ./versioneer.py for configuration instructions,\nedit setup.cfg, and re-run the installer or 'python versioneer.py setup'.\n\"\"\"\n\nSAMPLE_CONFIG = \"\"\"\n# See the docstring in versioneer.py for instructions. Note that you must\n# re-run 'versioneer.py setup' after changing this section, and commit the\n# resulting files.\n\n[versioneer]\n#VCS = git\n#style = pep440\n#versionfile_source =\n#versionfile_build =\n#tag_prefix =\n#parentdir_prefix =\n\n\"\"\"\n\nINIT_PY_SNIPPET = \"\"\"\nfrom ._version import get_versions\n__version__ = get_versions()['version']\ndel get_versions\n\"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_do_setup_do_setup.return.0": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_do_setup_do_setup.return.0", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1752, "end_line": 1831, "span_ids": ["do_setup"], "tokens": 763}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def do_setup():\n \"\"\"Do main VCS-independent setup function for installing Versioneer.\"\"\"\n root = get_root()\n try:\n cfg = get_config_from_root(root)\n except (EnvironmentError, configparser.NoSectionError, configparser.NoOptionError) as e:\n if isinstance(e, (EnvironmentError, configparser.NoSectionError)):\n print(\"Adding sample versioneer config to setup.cfg\", file=sys.stderr)\n with open(os.path.join(root, \"setup.cfg\"), \"a\") as f:\n f.write(SAMPLE_CONFIG)\n print(CONFIG_ERROR, file=sys.stderr)\n return 1\n\n print(\" creating %s\" % cfg.versionfile_source)\n with open(cfg.versionfile_source, \"w\") as f:\n LONG = LONG_VERSION_PY[cfg.VCS]\n f.write(\n LONG\n % {\n \"DOLLAR\": \"$\",\n \"STYLE\": cfg.style,\n \"TAG_PREFIX\": cfg.tag_prefix,\n \"PARENTDIR_PREFIX\": cfg.parentdir_prefix,\n \"VERSIONFILE_SOURCE\": cfg.versionfile_source,\n }\n )\n\n ipy = os.path.join(os.path.dirname(cfg.versionfile_source), \"__init__.py\")\n if os.path.exists(ipy):\n try:\n with open(ipy, \"r\") as f:\n old = f.read()\n except EnvironmentError:\n old = \"\"\n if INIT_PY_SNIPPET not in old:\n print(\" appending to %s\" % ipy)\n with open(ipy, \"a\") as f:\n f.write(INIT_PY_SNIPPET)\n else:\n print(\" %s unmodified\" % ipy)\n else:\n print(\" %s doesn't exist, ok\" % ipy)\n ipy = None\n\n # Make sure both the top-level \"versioneer.py\" and versionfile_source\n # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so\n # they'll be copied into source 
distributions. Pip won't be able to\n # install the package without this.\n manifest_in = os.path.join(root, \"MANIFEST.in\")\n simple_includes = set()\n try:\n with open(manifest_in, \"r\") as f:\n for line in f:\n if line.startswith(\"include \"):\n for include in line.split()[1:]:\n simple_includes.add(include)\n except EnvironmentError:\n pass\n # That doesn't cover everything MANIFEST.in can do\n # (http://docs.python.org/2/distutils/sourcedist.html#commands), so\n # it might give some false negatives. Appending redundant 'include'\n # lines is safe, though.\n if \"versioneer.py\" not in simple_includes:\n print(\" appending 'versioneer.py' to MANIFEST.in\")\n with open(manifest_in, \"a\") as f:\n f.write(\"include versioneer.py\\n\")\n else:\n print(\" 'versioneer.py' already in MANIFEST.in\")\n if cfg.versionfile_source not in simple_includes:\n print(\" appending versionfile_source ('%s') to MANIFEST.in\" % cfg.versionfile_source)\n with open(manifest_in, \"a\") as f:\n f.write(\"include %s\\n\" % cfg.versionfile_source)\n else:\n print(\" versionfile_source already in MANIFEST.in\")\n\n # Make VCS-specific changes. For git, this means creating/changing\n # .gitattributes to mark _version.py for export-subst keyword\n # substitution.\n do_vcs_install(manifest_in, cfg.versionfile_source, ipy)\n return 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_scan_setup_py_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/versioneer.py_scan_setup_py_", "embedding": null, "metadata": {"file_path": "versioneer.py", "file_name": "versioneer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1834, "end_line": 1878, "span_ids": ["scan_setup_py", "impl:14"], "tokens": 351}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def scan_setup_py():\n \"\"\"Validate the contents of setup.py against Versioneer's expectations.\"\"\"\n found = set()\n setters = False\n errors = 0\n with open(\"setup.py\", \"r\") as f:\n for line in f.readlines():\n if \"import versioneer\" in line:\n found.add(\"import\")\n if \"versioneer.get_cmdclass()\" in line:\n found.add(\"cmdclass\")\n if \"versioneer.get_version()\" in line:\n found.add(\"get_version\")\n if \"versioneer.VCS\" in line:\n setters = True\n if \"versioneer.versionfile_source\" in line:\n setters = True\n if len(found) != 3:\n print(\"\")\n print(\"Your setup.py appears to be missing some important items\")\n print(\"(but I might be wrong). Please make sure it has something\")\n print(\"roughly like the following:\")\n print(\"\")\n print(\" import versioneer\")\n print(\" setup( version=versioneer.get_version(),\")\n print(\" cmdclass=versioneer.get_cmdclass(), ...)\")\n print(\"\")\n errors += 1\n if setters:\n print(\"You should remove lines like 'versioneer.VCS = ' and\")\n print(\"'versioneer.versionfile_source = ' . 
This configuration\")\n print(\"now lives in setup.cfg, and should be removed from setup.py\")\n print(\"\")\n errors += 1\n return errors\n\n\nif __name__ == \"__main__\":\n cmd = sys.argv[1]\n if cmd == \"setup\":\n errors = do_setup()\n errors += scan_setup_py()\n if errors:\n sys.exit(1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/utils.py_extractall_extractall.if_filepath_endswith_zip.else_.raise_ValueError_Unsuppo": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/utils.py_extractall_extractall.if_filepath_endswith_zip.else_.raise_ValueError_Unsuppo", "embedding": null, "metadata": {"file_path": "monai/apps/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 198, "end_line": 233, "span_ids": ["extractall"], "tokens": 356}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def extractall(filepath: str, output_dir: str, hash_val: Optional[str] = None, hash_type: str = \"md5\") -> None:\n \"\"\"\n Extract file to the output directory.\n Expected file types are: `zip`, `tar.gz` and `tar`.\n\n Args:\n filepath: the file path of compressed file.\n output_dir: target directory to save extracted files.\n hash_val: expected hash value to validate the compressed file.\n if None, skip hash validation.\n hash_type: 'md5' or 'sha1', defaults to 'md5'.\n\n Raises:\n RuntimeError: When the hash validation of the ``filepath`` compressed file fails.\n ValueError: When the ``filepath`` file extension is not one of [zip\", \"tar.gz\", \"tar\"].\n\n \"\"\"\n target_file = os.path.join(output_dir, os.path.basename(filepath).split(\".\")[0])\n if os.path.exists(target_file):\n print(f\"extracted file {target_file} exists, skip extracting.\")\n return\n if not check_hash(filepath, hash_val, hash_type):\n raise RuntimeError(\n f\"{hash_type} check of compressed file failed: \" f\"filepath={filepath}, expected {hash_type}={hash_val}.\"\n )\n\n if filepath.endswith(\"zip\"):\n zip_file = zipfile.ZipFile(filepath)\n zip_file.extractall(output_dir)\n zip_file.close()\n elif filepath.endswith(\"tar\") or filepath.endswith(\"tar.gz\"):\n tar_file = tarfile.open(filepath)\n tar_file.extractall(output_dir)\n tar_file.close()\n else:\n raise ValueError('Unsupported file extension, available options are: [\"zip\", \"tar.gz\", \"tar\"].')", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/type_definitions.py_from_typing_import_Collec_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/type_definitions.py_from_typing_import_Collec_", "embedding": null, "metadata": {"file_path": "monai/config/type_definitions.py", 
"file_name": "type_definitions.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 70, "span_ids": ["docstring:13", "docstring"], "tokens": 473}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Collection, Hashable, Iterable, TypeVar, Union\n\nimport numpy as np\nimport torch\n\n__all__ = [\"KeysCollection\", \"IndexSelection\", \"DtypeLike\", \"NdarrayTensor\"]\n\n\"\"\"Commonly used concepts\nThis module provides naming and type specifications for commonly used concepts\nwithin the MONAI package. The intent is to explicitly identify information\nthat should be used consistently throughout the entire MONAI package.\n\nA type would be named as type_definitions.KeysCollection\nwhich includes a meaningful name for the consent in the name itself. The\ndefinitions in this file map context meaningful names to the underlying\nobject properties that define the expected API.\n\nA conceptual type is represented by a new type name but is also one which\ncan be different depending on an environment (i.e. differences for python 3.6 vs 3.9\nmay be implemented). Consistent use of the concept and recorded documentation of\nthe rationale and convention behind it lowers the learning curve for new\ndevelopers. For readability, short names are preferred.\n\"\"\"\n\nKeysCollection = Union[Collection[Hashable], Hashable]\n\"\"\"KeysCollection\n\nThe KeyCollection type is used to for defining variables\nthat store a subset of keys to select items from a dictionary.\nThe container of keys must contain hashable elements.\nNOTE: `Hashable` is not a collection, but is provided as a\n convenience to end-users. 
All supplied values will be\n internally converted to a tuple of `Hashable`'s before\n use\n\"\"\"\n\n\nIndexSelection = Union[Iterable[int], int]\n\"\"\"IndexSelection\n\nThe IndexSelection type is used for defining variables\nthat store a subset of indices to select items from List or Array-like objects.\nThe indices must be integers, and if a container of indices is specified, the\ncontainer must be iterable.\n\"\"\"\n\nDtypeLike = Union[\n np.dtype,\n type,\n None,\n]\n\"\"\"Type of datatypes\nadapted from https://github.com/numpy/numpy/blob/master/numpy/typing/_dtype_like.py\n\"\"\"\n\n# Generic type which can represent either a numpy.ndarray or a torch.Tensor\n# Unlike Union, it can create a dependence between parameter(s) / return(s)\nNdarrayTensor = TypeVar(\"NdarrayTensor\", np.ndarray, torch.Tensor)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/csv_saver.py_csv_CSVSaver.__init__.self._data_index.0": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/csv_saver.py_csv_CSVSaver.__init__.self._data_index.0", "embedding": null, "metadata": {"file_path": "monai/data/csv_saver.py", "file_name": "csv_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 46, "span_ids": ["CSVSaver.__init__", "CSVSaver", "docstring"], "tokens": 305}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import csv\nimport os\nfrom collections import OrderedDict\nfrom typing import Dict, Optional, Union\n\nimport numpy as np\nimport torch\n\nfrom monai.utils import ImageMetaKey as Key\n\n\nclass CSVSaver:\n \"\"\"\n Save the data in a dictionary format cache, and finally write to a CSV file.\n Typically, the data can be classification predictions; call `save` for single data\n or call `save_batch` to save a batch of data together, and call `finalize` to write\n the cached data into a CSV file. If no meta data is provided, use the index from 0 to save data.\n \"\"\"\n\n def __init__(self, output_dir: str = \"./\", filename: str = \"predictions.csv\", overwrite: bool = True) -> None:\n \"\"\"\n Args:\n output_dir: output CSV file directory.\n filename: name of the saved CSV file.\n overwrite: whether to overwrite existing CSV file content. 
If we are not overwriting,\n then we check if the results have been previously saved, and load them to the prediction_dict.\n\n \"\"\"\n self.output_dir = output_dir\n self._cache_dict: OrderedDict = OrderedDict()\n if not (isinstance(filename, str) and filename[-4:] == \".csv\"):\n raise AssertionError(\"filename must be a string with CSV format.\")\n self._filepath = os.path.join(output_dir, filename)\n self.overwrite = overwrite\n self._data_index = 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/png_saver.py_from_typing_import_Dict__PNGSaver.__init__.self._data_index.0": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/png_saver.py_from_typing_import_Dict__PNGSaver.__init__.self._data_index.0", "embedding": null, "metadata": {"file_path": "monai/data/png_saver.py", "file_name": "png_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 72, "span_ids": ["PNGSaver.__init__", "PNGSaver", "docstring"], "tokens": 603}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Dict, Optional, Union\n\nimport numpy as np\nimport torch\n\nfrom monai.data.png_writer import write_png\nfrom monai.data.utils import create_file_basename\nfrom monai.utils import ImageMetaKey as Key\nfrom monai.utils import InterpolateMode\n\n\nclass PNGSaver:\n \"\"\"\n Save the data as png file, it can support single data content or a batch of data.\n Typically, the data can be segmentation predictions, call `save` for single data\n or call `save_batch` to save a batch of data together. If no meta data provided,\n use index from 0 as the filename prefix.\n \"\"\"\n\n def __init__(\n self,\n output_dir: str = \"./\",\n output_postfix: str = \"seg\",\n output_ext: str = \".png\",\n resample: bool = True,\n mode: Union[InterpolateMode, str] = InterpolateMode.NEAREST,\n scale: Optional[int] = None,\n data_root_dir: str = \"\",\n ) -> None:\n \"\"\"\n Args:\n output_dir: output image directory.\n output_postfix: a string appended to all output file names.\n output_ext: output file extension name.\n resample: whether to resample and resize if providing spatial_shape in the metadata.\n mode: {``\"nearest\"``, ``\"linear\"``, ``\"bilinear\"``, ``\"bicubic\"``, ``\"trilinear\"``, ``\"area\"``}\n The interpolation mode. Defaults to ``\"nearest\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#interpolate\n scale: {``255``, ``65535``} postprocess data by clipping to [0, 1] and scaling\n [0, 255] (uint8) or [0, 65535] (uint16). Default is None to disable scaling.\n data_root_dir: if not empty, it specifies the beginning parts of the input file's\n absolute path. it's used to compute `input_file_rel_path`, the relative path to the file from\n `data_root_dir` to preserve folder structure when saving in case there are files in different\n folders with the same file names. 
for example:\n input_file_name: /foo/bar/test1/image.png,\n postfix: seg\n output_ext: png\n output_dir: /output,\n data_root_dir: /foo/bar,\n output will be: /output/test1/image/image_seg.png\n\n \"\"\"\n self.output_dir = output_dir\n self.output_postfix = output_postfix\n self.output_ext = output_ext\n self.resample = resample\n self.mode: InterpolateMode = InterpolateMode(mode)\n self.scale = scale\n self.data_root_dir = data_root_dir\n\n self._data_index = 0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/multi_gpu_supervised_trainer.py_from_typing_import_TYPE_C__default_eval_transform.return.y_pred_y": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/multi_gpu_supervised_trainer.py_from_typing_import_TYPE_C__default_eval_transform.return.y_pred_y", "embedding": null, "metadata": {"file_path": "monai/engines/multi_gpu_supervised_trainer.py", "file_name": "multi_gpu_supervised_trainer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 45, "span_ids": ["_default_eval_transform", "_default_transform", "docstring"], "tokens": 325}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import TYPE_CHECKING, Callable, Dict, Optional, Sequence, Tuple\n\nimport torch\nimport torch.nn\nfrom torch.nn.parallel import DataParallel, DistributedDataParallel\nfrom torch.optim.optimizer import Optimizer\n\nfrom monai.engines.utils import get_devices_spec\nfrom monai.utils import exact_version, optional_import\n\ncreate_supervised_trainer, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"create_supervised_trainer\")\ncreate_supervised_evaluator, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"create_supervised_evaluator\")\n_prepare_batch, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"_prepare_batch\")\nif TYPE_CHECKING:\n from ignite.engine import Engine\n from ignite.metrics import Metric\nelse:\n Engine, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Engine\")\n Metric, _ = optional_import(\"ignite.metrics\", \"0.4.4\", exact_version, \"Metric\")\n\n__all__ = [\n \"create_multigpu_supervised_trainer\",\n \"create_multigpu_supervised_evaluator\",\n]\n\n\ndef _default_transform(_x: torch.Tensor, _y: torch.Tensor, _y_pred: torch.Tensor, loss: torch.Tensor) -> float:\n return loss.item()\n\n\ndef _default_eval_transform(\n x: torch.Tensor, y: torch.Tensor, y_pred: torch.Tensor\n) -> Tuple[torch.Tensor, torch.Tensor]:\n return y_pred, y", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_GanTrainer_GanTrainer._": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_GanTrainer_GanTrainer._", "embedding": null, "metadata": {"file_path": "monai/engines/trainer.py", "file_name": "trainer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 187, "end_line": 231, "span_ids": ["GanTrainer"], "tokens": 637}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GanTrainer(Trainer):\n \"\"\"\n Generative adversarial network training based on Goodfellow et al. 2014 https://arxiv.org/abs/1406.266,\n inherits from ``Trainer`` and ``Workflow``.\n\n Training Loop: for each batch of data size `m`\n 1. Generate `m` fakes from random latent codes.\n 2. Update discriminator with these fakes and current batch reals, repeated d_train_steps times.\n 3. If g_update_latents, generate `m` fakes from new random latent codes.\n 4. Update generator with these fakes using discriminator feedback.\n\n Args:\n device: an object representing the device on which to run.\n max_epochs: the total epoch number for engine to run.\n train_data_loader: Core ignite engines uses `DataLoader` for training loop batchdata.\n g_network: generator (G) network architecture.\n g_optimizer: G optimizer function.\n g_loss_function: G loss function for optimizer.\n d_network: discriminator (D) network architecture.\n d_optimizer: D optimizer function.\n d_loss_function: D loss function for optimizer.\n epoch_length: number of iterations for one epoch, default to `len(train_data_loader)`.\n g_inferer: inference method to execute G model forward. Defaults to ``SimpleInferer()``.\n d_inferer: inference method to execute D model forward. Defaults to ``SimpleInferer()``.\n d_train_steps: number of times to update D with real data minibatch. Defaults to ``1``.\n latent_shape: size of G input latent code. Defaults to ``64``.\n non_blocking: if True and this copy is between CPU and GPU, the copy may occur asynchronously\n with respect to the host. For other cases, this argument has no effect.\n d_prepare_batch: callback function to prepare batchdata for D inferer.\n Defaults to return ``GanKeys.REALS`` in batchdata dict.\n g_prepare_batch: callback function to create batch of latent input for G inferer.\n Defaults to return random latents.\n g_update_latents: Calculate G loss with new latent codes. Defaults to ``True``.\n iteration_update: the callable function for every iteration, expect to accept `engine`\n and `batchdata` as input parameters. if not provided, use `self._iteration()` instead.\n post_transform: execute additional transformation for the model output data.\n Typically, several Tensor based transforms composed by `Compose`.\n key_train_metric: compute metric when every iteration completed, and save average value to\n engine.state.metrics when epoch completed. 
key_train_metric is the main metric to compare and save the\n checkpoint into files.\n additional_metrics: more Ignite metrics that also attach to Ignite Engine.\n train_handlers: every handler is a set of Ignite Event-Handlers, must have `attach` function, like:\n CheckpointHandler, StatsHandler, SegmentationSaver, etc.\n\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_GanTrainer.__init___GanTrainer.__init__.self.g_update_latents.g_update_latents": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_GanTrainer.__init___GanTrainer.__init__.self.g_update_latents.g_update_latents", "embedding": null, "metadata": {"file_path": "monai/engines/trainer.py", "file_name": "trainer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 233, "end_line": 287, "span_ids": ["GanTrainer.__init__"], "tokens": 496}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GanTrainer(Trainer):\n\n def __init__(\n self,\n device: torch.device,\n max_epochs: int,\n train_data_loader: DataLoader,\n g_network: torch.nn.Module,\n g_optimizer: Optimizer,\n g_loss_function: Callable,\n d_network: torch.nn.Module,\n d_optimizer: Optimizer,\n d_loss_function: Callable,\n epoch_length: Optional[int] = None,\n g_inferer: Optional[Inferer] = None,\n d_inferer: Optional[Inferer] = None,\n d_train_steps: int = 1,\n latent_shape: int = 64,\n non_blocking: bool = False,\n d_prepare_batch: Callable = default_prepare_batch,\n g_prepare_batch: Callable = default_make_latent,\n g_update_latents: bool = True,\n iteration_update: Optional[Callable] = None,\n post_transform: Optional[Transform] = None,\n key_train_metric: Optional[Dict[str, Metric]] = None,\n additional_metrics: Optional[Dict[str, Metric]] = None,\n train_handlers: Optional[Sequence] = None,\n ):\n if not isinstance(train_data_loader, DataLoader):\n raise ValueError(\"train_data_loader must be PyTorch DataLoader.\")\n\n # set up Ignite engine and environments\n super().__init__(\n device=device,\n max_epochs=max_epochs,\n data_loader=train_data_loader,\n epoch_length=epoch_length,\n non_blocking=non_blocking,\n prepare_batch=d_prepare_batch,\n iteration_update=iteration_update,\n key_metric=key_train_metric,\n additional_metrics=additional_metrics,\n handlers=train_handlers,\n post_transform=post_transform,\n )\n self.g_network = g_network\n self.g_optimizer = g_optimizer\n self.g_loss_function = g_loss_function\n self.g_inferer = SimpleInferer() if g_inferer is None else g_inferer\n self.d_network = d_network\n self.d_optimizer = d_optimizer\n self.d_loss_function = d_loss_function\n self.d_inferer = SimpleInferer() if d_inferer is None else d_inferer\n self.d_train_steps = d_train_steps\n self.latent_shape = latent_shape\n self.g_prepare_batch = g_prepare_batch\n self.g_update_latents = g_update_latents", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_GanTrainer._iteration_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/trainer.py_GanTrainer._iteration_", "embedding": null, "metadata": {"file_path": "monai/engines/trainer.py", "file_name": "trainer.py", "file_type": "text/x-python", "category": "implementation", "start_line": 289, "end_line": 338, "span_ids": ["GanTrainer._iteration"], "tokens": 423}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GanTrainer(Trainer):\n\n def _iteration(\n self, engine: Engine, batchdata: Union[Dict, Sequence]\n ) -> Dict[str, Union[torch.Tensor, int, float, bool]]:\n \"\"\"\n Callback function for Adversarial Training processing logic of 1 iteration in Ignite Engine.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n batchdata: input data for this iteration, usually can be dictionary or tuple of Tensor data.\n\n Raises:\n ValueError: must provide batch data for current iteration.\n\n \"\"\"\n if batchdata is None:\n raise ValueError(\"must provide batch data for current iteration.\")\n\n d_input = self.prepare_batch(batchdata, engine.state.device, engine.non_blocking)\n batch_size = self.data_loader.batch_size # type: ignore\n g_input = self.g_prepare_batch(batch_size, self.latent_shape, engine.state.device, engine.non_blocking)\n g_output = self.g_inferer(g_input, self.g_network)\n\n # Train Discriminator\n d_total_loss = torch.zeros(\n 1,\n )\n for _ in range(self.d_train_steps):\n self.d_optimizer.zero_grad()\n dloss = self.d_loss_function(g_output, d_input)\n dloss.backward()\n self.d_optimizer.step()\n d_total_loss += dloss.item()\n\n # Train Generator\n if self.g_update_latents:\n g_input = self.g_prepare_batch(batch_size, self.latent_shape, engine.state.device, engine.non_blocking)\n g_output = self.g_inferer(g_input, self.g_network)\n self.g_optimizer.zero_grad()\n g_loss = self.g_loss_function(g_output)\n g_loss.backward()\n self.g_optimizer.step()\n\n return {\n GanKeys.REALS: d_input,\n GanKeys.FAKES: g_output,\n GanKeys.LATENTS: g_input,\n GanKeys.GLOSS: g_loss.item(),\n GanKeys.DLOSS: d_total_loss.item(),\n }", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/utils.py_get_devices_spec_get_devices_spec.return.devices": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/utils.py_get_devices_spec_get_devices_spec.return.devices", "embedding": null, "metadata": {"file_path": "monai/engines/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 46, "end_line": 74, "span_ids": ["get_devices_spec"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_devices_spec(devices: Optional[Sequence[torch.device]] = None) -> List[torch.device]:\n \"\"\"\n Get a valid specification for one or more devices. If `devices` is None get devices for all CUDA devices available.\n If `devices` is and zero-length structure a single CPU compute device is returned. In any other cases `devices` is\n returned unchanged.\n\n Args:\n devices: list of devices to request, None for all GPU devices, [] for CPU.\n\n Raises:\n RuntimeError: When all GPUs are selected (``devices=None``) but no GPUs are available.\n\n Returns:\n list of torch.device: list of devices.\n\n \"\"\"\n if devices is None:\n devices = [torch.device(f\"cuda:{d:d}\") for d in range(torch.cuda.device_count())]\n\n if len(devices) == 0:\n raise RuntimeError(\"No GPU devices available.\")\n\n elif len(devices) == 0:\n devices = [torch.device(\"cpu\")]\n\n else:\n devices = list(devices)\n\n return devices", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/utils.py_default_prepare_batch_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/engines/utils.py_default_prepare_batch_", "embedding": null, "metadata": {"file_path": "monai/engines/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 95, "end_line": 127, "span_ids": ["default_make_latent", "default_prepare_batch"], "tokens": 306}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def default_prepare_batch(\n batchdata: Dict[str, torch.Tensor],\n device: Optional[Union[str, torch.device]] = None,\n non_blocking: bool = False,\n) -> Union[Tuple[torch.Tensor, Optional[torch.Tensor]], torch.Tensor]:\n \"\"\"\n Default function to prepare the data for current iteration.\n Refer to ignite: https://github.com/pytorch/ignite/blob/v0.4.2/ignite/engine/__init__.py#L28.\n\n Returns:\n image, label(optional).\n\n \"\"\"\n if not isinstance(batchdata, dict):\n raise AssertionError(\"default prepare_batch expects dictionary input data.\")\n if isinstance(batchdata.get(CommonKeys.LABEL, None), torch.Tensor):\n return (\n batchdata[CommonKeys.IMAGE].to(device=device, non_blocking=non_blocking),\n batchdata[CommonKeys.LABEL].to(device=device, non_blocking=non_blocking),\n )\n if GanKeys.REALS in batchdata:\n return batchdata[GanKeys.REALS].to(device=device, non_blocking=non_blocking)\n return batchdata[CommonKeys.IMAGE].to(device=device, non_blocking=non_blocking), None\n\n\ndef default_make_latent(\n num_latents: int,\n latent_size: int,\n device: Optional[Union[str, torch.device]] = None,\n non_blocking: bool = False,\n) -> torch.Tensor:\n return torch.randn(num_latents, latent_size).to(device=device, non_blocking=non_blocking)", 
"start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_loader.py_logging_if_TYPE_CHECKING_.else_.Engine___optional_impo": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_loader.py_logging_if_TYPE_CHECKING_.else_.Engine___optional_impo", "embedding": null, "metadata": {"file_path": "monai/handlers/checkpoint_loader.py", "file_name": "checkpoint_loader.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 25, "span_ids": ["docstring"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import logging\nfrom typing import TYPE_CHECKING, Dict, Optional\n\nimport torch\nimport torch.nn as nn\n\nfrom monai.utils import exact_version, optional_import\n\nEvents, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Events\")\nCheckpoint, _ = optional_import(\"ignite.handlers\", \"0.4.4\", exact_version, \"Checkpoint\")\nif TYPE_CHECKING:\n from ignite.engine import Engine\nelse:\n Engine, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Engine\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_loader.py_CheckpointLoader_CheckpointLoader.attach.engine_add_event_handler_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_loader.py_CheckpointLoader_CheckpointLoader.attach.engine_add_event_handler_", "embedding": null, "metadata": {"file_path": "monai/handlers/checkpoint_loader.py", "file_name": "checkpoint_loader.py", "file_type": "text/x-python", "category": "implementation", "start_line": 28, "end_line": 85, "span_ids": ["CheckpointLoader.attach", "CheckpointLoader", "CheckpointLoader.__init__"], "tokens": 566}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CheckpointLoader:\n \"\"\"\n CheckpointLoader acts as an Ignite handler to load checkpoint data from file.\n It can load variables for network, optimizer, lr_scheduler, etc.\n If saving checkpoint after `torch.nn.DataParallel`, need to save `model.module` instead\n as PyTorch recommended and then use this loader to load the model.\n\n Args:\n load_path: the file path of checkpoint, it should be a PyTorch `pth` file.\n load_dict: target objects that load checkpoint to. 
examples::\n\n {'network': net, 'optimizer': optimizer, 'lr_scheduler': lr_scheduler}\n\n name: identifier of logging.logger to use, if None, defaulting to ``engine.logger``.\n map_location: when loading the module for distributed training/evaluation,\n need to provide an appropriate map_location argument to prevent a process\n to step into others\u2019 devices. If map_location is missing, torch.load will\n first load the module to CPU and then copy each parameter to where it was\n saved, which would result in all processes on the same machine using the\n same set of devices.\n strict: whether to strictly enforce that the keys in `state_dict` match the keys\n returned by `torch.nn.Module.state_dict` function. default to `True`.\n strict_shape: whether to enforce the data shape of the matched layers in the checkpoint,\n `if `False`, it will skip the layers that have different data shape with checkpoint content.\n This can be useful advanced feature for transfer learning. users should totally\n understand which layers will have different shape. default to `True`.\n\n \"\"\"\n\n def __init__(\n self,\n load_path: str,\n load_dict: Dict,\n name: Optional[str] = None,\n map_location: Optional[Dict] = None,\n strict: bool = True,\n strict_shape: bool = True,\n ) -> None:\n if load_path is None:\n raise AssertionError(\"must provide clear path to load checkpoint.\")\n self.load_path = load_path\n if not (load_dict is not None and len(load_dict) > 0):\n raise AssertionError(\"must provide target objects to load.\")\n self.logger = logging.getLogger(name)\n self.load_dict = load_dict\n self._name = name\n self.map_location = map_location\n self.strict = strict\n self.strict_shape = strict_shape\n\n def attach(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n if self._name is None:\n self.logger = engine.logger\n engine.add_event_handler(Events.STARTED, self)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_loader.py_CheckpointLoader.__call___": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_loader.py_CheckpointLoader.__call___", "embedding": null, "metadata": {"file_path": "monai/handlers/checkpoint_loader.py", "file_name": "checkpoint_loader.py", "file_type": "text/x-python", "category": "implementation", "start_line": 87, "end_line": 121, "span_ids": ["CheckpointLoader.__call__"], "tokens": 377}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CheckpointLoader:\n\n def __call__(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n checkpoint = torch.load(self.load_path, map_location=self.map_location)\n\n if not self.strict_shape:\n k, _ = list(self.load_dict.items())[0]\n # single object and checkpoint is directly a state_dict\n if len(self.load_dict) == 1 and k not in checkpoint:\n checkpoint = {k: 
checkpoint}\n\n # skip items that don't match data shape\n for k, obj in self.load_dict.items():\n if isinstance(obj, (nn.DataParallel, nn.parallel.DistributedDataParallel)):\n obj = obj.module\n if isinstance(obj, torch.nn.Module):\n d = obj.state_dict()\n checkpoint[k] = {k: v for k, v in checkpoint[k].items() if k in d and v.shape == d[k].shape}\n\n # save current max epochs setting in the engine, don't overwrite it if larger than max_epochs in checkpoint\n prior_max_epochs = engine.state.max_epochs\n Checkpoint.load_objects(to_load=self.load_dict, checkpoint=checkpoint, strict=self.strict)\n if engine.state.epoch > prior_max_epochs:\n raise ValueError(\n f\"Epoch count ({engine.state.epoch}) in checkpoint is larger than \"\n f\"the `engine.state.max_epochs` ({prior_max_epochs}) of engine. To further train from checkpoint, \"\n \"construct trainer with `max_epochs` larger than checkpoint's epoch count. \"\n \"To use checkpoint for inference, no need to load state_dict for the engine.\"\n )\n engine.state.max_epochs = prior_max_epochs\n\n self.logger.info(f\"Restored all variables from {self.load_path}\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver.exception_raised_CheckpointSaver.exception_raised.raise_e": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver.exception_raised_CheckpointSaver.exception_raised.raise_e", "embedding": null, "metadata": {"file_path": "monai/handlers/checkpoint_saver.py", "file_name": "checkpoint_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 256, "end_line": 275, "span_ids": ["CheckpointSaver.exception_raised"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CheckpointSaver:\n\n def exception_raised(self, engine: Engine, e: Exception) -> None:\n \"\"\"Callback for train or validation/evaluation exception raised Event.\n Save current data as final checkpoint if configure save_final is True. 
This callback may be skipped\n because the logic with Ignite can only trigger the first attached handler for `EXCEPTION_RAISED` event.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n e: the exception caught in Ignite during engine.run().\n \"\"\"\n if not callable(self._final_checkpoint):\n raise AssertionError(\"Error: _final_checkpoint function not specified.\")\n # delete previous saved final checkpoint if existing\n self._delete_previous_final_ckpt()\n self._final_checkpoint(engine)\n if self.logger is None:\n raise AssertionError\n if not hasattr(self.logger, \"info\"):\n raise AssertionError(\"Error, provided logger has not info attribute.\")\n self.logger.info(f\"Exception_raised, saved exception checkpoint: {self._final_checkpoint.last_checkpoint}\")\n raise e", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver.metrics_completed_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/checkpoint_saver.py_CheckpointSaver.metrics_completed_", "embedding": null, "metadata": {"file_path": "monai/handlers/checkpoint_saver.py", "file_name": "checkpoint_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 277, "end_line": 305, "span_ids": ["CheckpointSaver.metrics_completed", "CheckpointSaver.interval_completed"], "tokens": 251}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CheckpointSaver:\n\n def metrics_completed(self, engine: Engine) -> None:\n \"\"\"Callback to compare metrics and save models in train or validation when epoch completed.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n if not callable(self._key_metric_checkpoint):\n raise AssertionError(\"Error: _key_metric_checkpoint function not specified.\")\n self._key_metric_checkpoint(engine)\n\n def interval_completed(self, engine: Engine) -> None:\n \"\"\"Callback for train epoch/iteration completed Event.\n Save checkpoint if configure save_interval = N\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n if not callable(self._interval_checkpoint):\n raise AssertionError(\"Error: _interval_checkpoint function not specified.\")\n self._interval_checkpoint(engine)\n if self.logger is None:\n raise AssertionError\n if not hasattr(self.logger, \"info\"):\n raise AssertionError(\"Error, provided logger has not info attribute.\")\n if self.epoch_level:\n self.logger.info(f\"Saved checkpoint at epoch: {engine.state.epoch}\")\n else:\n self.logger.info(f\"Saved checkpoint at iteration: {engine.state.iteration}\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/lr_schedule_handler.py_logging_if_TYPE_CHECKING_.else_.Engine___optional_impo": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/lr_schedule_handler.py_logging_if_TYPE_CHECKING_.else_.Engine___optional_impo", "embedding": null, "metadata": {"file_path": "monai/handlers/lr_schedule_handler.py", "file_name": "lr_schedule_handler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 23, "span_ids": ["docstring"], "tokens": 111}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import logging\nfrom typing import TYPE_CHECKING, Any, Callable, Optional, Union\n\nfrom torch.optim.lr_scheduler import ReduceLROnPlateau, _LRScheduler\n\nfrom monai.utils import ensure_tuple, exact_version, optional_import\n\nEvents, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Events\")\nif TYPE_CHECKING:\n from ignite.engine import Engine\nelse:\n Engine, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Engine\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/roc_auc.py_ROCAUC.compute_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/roc_auc.py_ROCAUC.compute_", "embedding": null, "metadata": {"file_path": "monai/handlers/roc_auc.py", "file_name": "roc_auc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 74, "end_line": 95, "span_ids": ["ROCAUC.compute"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ROCAUC(EpochMetric):\n\n def compute(self) -> Any:\n _prediction_tensor = torch.cat(self._predictions, dim=0)\n _target_tensor = torch.cat(self._targets, dim=0)\n\n ws = idist.get_world_size()\n if ws > 1 and not self._is_reduced:\n # All gather across all processes\n _prediction_tensor = evenly_divisible_all_gather(_prediction_tensor)\n _target_tensor = evenly_divisible_all_gather(_target_tensor)\n self._is_reduced = True\n\n result: torch.Tensor = torch.zeros(1)\n if idist.get_rank() == 0:\n # Run compute_fn on zero rank only\n result = self.compute_fn(_prediction_tensor, _target_tensor)\n\n if ws > 1:\n # broadcast result to all processes\n result = idist.broadcast(result, src=0)\n\n return result.item() if torch.is_tensor(result) else result", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/segmentation_saver.py_logging_if_TYPE_CHECKING_.else_.Engine___optional_impo": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/segmentation_saver.py_logging_if_TYPE_CHECKING_.else_.Engine___optional_impo", "embedding": null, "metadata": {"file_path": "monai/handlers/segmentation_saver.py", "file_name": "segmentation_saver.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 26, "span_ids": ["docstring"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import logging\nimport warnings\nfrom typing import TYPE_CHECKING, Callable, Optional, Union\n\nimport numpy as np\n\nfrom monai.config import DtypeLike\nfrom monai.transforms import SaveImage\nfrom monai.utils import GridSampleMode, GridSamplePadMode, InterpolateMode, exact_version, optional_import\n\nEvents, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Events\")\nif TYPE_CHECKING:\n from ignite.engine import Engine\nelse:\n Engine, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Engine\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_logging_DEFAULT_TAG._Loss_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_logging_DEFAULT_TAG._Loss_", "embedding": null, "metadata": {"file_path": "monai/handlers/stats_handler.py", "file_name": "stats_handler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 27, "span_ids": ["docstring"], "tokens": 115}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import logging\nimport warnings\nfrom typing import TYPE_CHECKING, Any, Callable, Optional\n\nimport torch\n\nfrom monai.utils import exact_version, is_scalar, optional_import\n\nEvents, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Events\")\nif TYPE_CHECKING:\n from ignite.engine import Engine\nelse:\n Engine, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Engine\")\n\nDEFAULT_KEY_VAL_FORMAT = \"{}: {:.4f} \"\nDEFAULT_TAG = \"Loss\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler.epoch_completed_StatsHandler.iteration_completed.if_self_iteration_print_l.else_.self__default_iteration_p": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler.epoch_completed_StatsHandler.iteration_completed.if_self_iteration_print_l.else_.self__default_iteration_p", "embedding": null, "metadata": {"file_path": "monai/handlers/stats_handler.py", "file_name": "stats_handler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 105, "end_line": 131, "span_ids": ["StatsHandler.epoch_completed", "StatsHandler.iteration_completed"], "tokens": 192}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class StatsHandler:\n\n def epoch_completed(self, engine: Engine) -> None:\n \"\"\"\n Handler for train or validation/evaluation epoch completed Event.\n Print epoch level log, default values are from Ignite state.metrics dict.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n\n \"\"\"\n if self.epoch_print_logger is not None:\n self.epoch_print_logger(engine)\n else:\n self._default_epoch_print(engine)\n\n def iteration_completed(self, engine: Engine) -> None:\n \"\"\"\n Handler for train or validation/evaluation iteration completed Event.\n Print iteration level log, default values are from Ignite state.logs dict.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n\n \"\"\"\n if self.iteration_print_logger is not None:\n self.iteration_print_logger(engine)\n else:\n self._default_iteration_print(engine)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler.exception_raised_StatsHandler.exception_raised.raise_e": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/stats_handler.py_StatsHandler.exception_raised_StatsHandler.exception_raised.raise_e", "embedding": null, "metadata": {"file_path": "monai/handlers/stats_handler.py", "file_name": "stats_handler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 133, "end_line": 145, "span_ids": ["StatsHandler.exception_raised"], "tokens": 123}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class StatsHandler:\n\n def exception_raised(self, engine: Engine, e: Exception) -> None:\n \"\"\"\n Handler for train or validation/evaluation exception raised Event.\n Print the exception information and traceback. 
This callback may be skipped because the logic\n with Ignite can only trigger the first attached handler for `EXCEPTION_RAISED` event.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n e: the exception caught in Ignite during engine.run().\n\n \"\"\"\n self.logger.exception(f\"Exception: {e}\")\n raise e", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_warnings_DEFAULT_TAG._Loss_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/tensorboard_handlers.py_warnings_DEFAULT_TAG._Loss_", "embedding": null, "metadata": {"file_path": "monai/handlers/tensorboard_handlers.py", "file_name": "tensorboard_handlers.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 29, "span_ids": ["docstring"], "tokens": 150}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import warnings\nfrom typing import TYPE_CHECKING, Any, Callable, Optional\n\nimport numpy as np\nimport torch\n\nfrom monai.utils import exact_version, is_scalar, optional_import\nfrom monai.visualize import plot_2d_or_3d_image\n\nEvents, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Events\")\nif TYPE_CHECKING:\n from ignite.engine import Engine\n from torch.utils.tensorboard import SummaryWriter\nelse:\n Engine, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Engine\")\n SummaryWriter, _ = optional_import(\"torch.utils.tensorboard\", name=\"SummaryWriter\")\n\nDEFAULT_TAG = \"Loss\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_warnings_DiceLoss._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_warnings_DiceLoss._", "embedding": null, "metadata": {"file_path": "monai/losses/dice.py", "file_name": "dice.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 40, "span_ids": ["DiceLoss", "docstring"], "tokens": 346}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import warnings\nfrom typing import Callable, List, Optional, Sequence, Union\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom torch.nn.modules.loss import _Loss\n\nfrom monai.losses.focal_loss import FocalLoss\nfrom monai.networks import one_hot\nfrom monai.utils import LossReduction, Weight\n\n\nclass 
DiceLoss(_Loss):\n \"\"\"\n Compute average Dice loss between two tensors. It can support both multi-classes and multi-labels tasks.\n Input logits `input` (BNHW[D] where N is number of classes) is compared with ground truth `target` (BNHW[D]).\n Axis N of `input` is expected to have logit predictions for each class rather than being image channels,\n while the same axis of `target` can be 1 or N (one-hot format). The `smooth_nr` and `smooth_dr` parameters are\n values added to the intersection and union components of the inter-over-union calculation to smooth results\n respectively, these values should be small. The `include_background` class attribute can be set to False for\n an instance of DiceLoss to exclude the first category (channel index 0) which is by convention assumed to be\n background. If the non-background segmentations are small compared to the total image size they can get\n overwhelmed by the signal from the background so excluding it in such cases helps convergence.\n\n Milletari, F. et. al. (2016) V-Net: Fully Convolutional Neural Networks forVolumetric Medical Image Segmentation, 3DV, 2016.\n\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_MaskedDiceLoss_MaskedDiceLoss._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_MaskedDiceLoss_MaskedDiceLoss._", "embedding": null, "metadata": {"file_path": "monai/losses/dice.py", "file_name": "dice.py", "file_type": "text/x-python", "category": "implementation", "start_line": 160, "end_line": 168, "span_ids": ["MaskedDiceLoss"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MaskedDiceLoss(DiceLoss):\n \"\"\"\n Add an additional `masking` process before `DiceLoss`, accept a binary mask ([0, 1]) indicating a region,\n `input` and `target` will be masked by the region: region with mask `1` will keep the original value,\n region with `0` mask will be converted to `0`. 
Then feed `input` and `target` to normal `DiceLoss` computation.\n This has the effect of ensuring only the masked region contributes to the loss computation and\n hence gradient calculation.\n\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_MaskedDiceLoss.forward_MaskedDiceLoss.forward.return.super_forward_input_inp": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_MaskedDiceLoss.forward_MaskedDiceLoss.forward.return.super_forward_input_inp", "embedding": null, "metadata": {"file_path": "monai/losses/dice.py", "file_name": "dice.py", "file_type": "text/x-python", "category": "implementation", "start_line": 188, "end_line": 213, "span_ids": ["MaskedDiceLoss.forward"], "tokens": 302}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MaskedDiceLoss(DiceLoss):\n\n def forward(self, input: torch.Tensor, target: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor:\n \"\"\"\n Args:\n input: the shape should be BNH[WD].\n target: the shape should be BNH[WD].\n mask: the shape should B1H[WD] or 11H[WD].\n \"\"\"\n if mask is not None:\n # checking if mask is of proper shape\n if input.dim() != mask.dim():\n raise AssertionError(f\"dim of input ({input.shape}) is different from mask ({mask.shape})\")\n if not (input.shape[0] == mask.shape[0] or mask.shape[0] == 1):\n raise AssertionError(f\" batch size of mask ({mask.shape}) must be 1 or equal to input ({input.shape})\")\n\n if target.dim() > 1:\n if mask.shape[1] != 1:\n raise AssertionError(f\"mask ({mask.shape}) must have only 1 channel\")\n if input.shape[2:] != mask.shape[2:]:\n raise AssertionError(f\"spatial size of input ({input.shape}) is different from mask ({mask.shape})\")\n\n input = input * mask\n target = target * mask\n else:\n warnings.warn(\"no mask value specified for the MaskedDiceLoss.\")\n\n return super().forward(input=input, target=target)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedDiceLoss.forward_GeneralizedDiceLoss.forward.return.f": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedDiceLoss.forward_GeneralizedDiceLoss.forward.return.f", "embedding": null, "metadata": {"file_path": "monai/losses/dice.py", "file_name": "dice.py", "file_type": "text/x-python", "category": "implementation", "start_line": 294, "end_line": 363, "span_ids": ["GeneralizedDiceLoss.forward"], "tokens": 620}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GeneralizedDiceLoss(_Loss):\n\n def forward(self, input: torch.Tensor, target: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n input: the shape should be BNH[WD].\n target: the shape should be BNH[WD].\n\n Raises:\n ValueError: When ``self.reduction`` is not one of [\"mean\", \"sum\", \"none\"].\n\n \"\"\"\n if self.sigmoid:\n input = torch.sigmoid(input)\n n_pred_ch = input.shape[1]\n if self.softmax:\n if n_pred_ch == 1:\n warnings.warn(\"single channel prediction, `softmax=True` ignored.\")\n else:\n input = torch.softmax(input, 1)\n\n if self.other_act is not None:\n input = self.other_act(input)\n\n if self.to_onehot_y:\n if n_pred_ch == 1:\n warnings.warn(\"single channel prediction, `to_onehot_y=True` ignored.\")\n else:\n target = one_hot(target, num_classes=n_pred_ch)\n\n if not self.include_background:\n if n_pred_ch == 1:\n warnings.warn(\"single channel prediction, `include_background=False` ignored.\")\n else:\n # if skipping background, removing first channel\n target = target[:, 1:]\n input = input[:, 1:]\n\n if target.shape != input.shape:\n raise AssertionError(f\"ground truth has differing shape ({target.shape}) from input ({input.shape})\")\n\n # reducing only spatial dimensions (not batch nor channels)\n reduce_axis: List[int] = torch.arange(2, len(input.shape)).tolist()\n if self.batch:\n reduce_axis = [0] + reduce_axis\n intersection = torch.sum(target * input, reduce_axis)\n\n ground_o = torch.sum(target, reduce_axis)\n pred_o = torch.sum(input, reduce_axis)\n\n denominator = ground_o + pred_o\n\n w = self.w_func(ground_o.float())\n for b in w:\n infs = torch.isinf(b)\n b[infs] = 0.0\n b[infs] = torch.max(b)\n\n f: torch.Tensor = 1.0 - (2.0 * (intersection * w).sum(0 if self.batch else 1) + self.smooth_nr) / (\n (denominator * w).sum(0 if self.batch else 1) + self.smooth_dr\n )\n\n if self.reduction == LossReduction.MEAN.value:\n f = torch.mean(f) # the batch and channel average\n elif self.reduction == LossReduction.SUM.value:\n f = torch.sum(f) # sum over the batch and channel dims\n elif self.reduction == LossReduction.NONE.value:\n pass # returns [N, n_classes] losses\n else:\n raise ValueError(f'Unsupported reduction: {self.reduction}, available options are [\"mean\", \"sum\", \"none\"].')\n\n return f", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedWassersteinDiceLoss.wasserstein_distance_map_GeneralizedWassersteinDiceLoss.wasserstein_distance_map.return.wasserstein_map": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/losses/dice.py_GeneralizedWassersteinDiceLoss.wasserstein_distance_map_GeneralizedWassersteinDiceLoss.wasserstein_distance_map.return.wasserstein_map", "embedding": null, "metadata": {"file_path": "monai/losses/dice.py", "file_name": "dice.py", "file_type": "text/x-python", "category": "implementation", "start_line": 502, "end_line": 538, "span_ids": ["GeneralizedWassersteinDiceLoss.wasserstein_distance_map"], "tokens": 414}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", 
"start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GeneralizedWassersteinDiceLoss(_Loss):\n\n def wasserstein_distance_map(self, flat_proba: torch.Tensor, flat_target: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Compute the voxel-wise Wasserstein distance between the\n flattened prediction and the flattened labels (ground_truth) with respect\n to the distance matrix on the label space M.\n This corresponds to eq. 6 in:\n\n Fidon L. et al. (2017) Generalised Wasserstein Dice Score for Imbalanced Multi-class\n Segmentation using Holistic Convolutional Networks. BrainLes 2017.\n\n Args:\n flat_proba: the probabilities of input(predicted) tensor.\n flat_target: the target tensor.\n \"\"\"\n # Turn the distance matrix to a map of identical matrix\n m = torch.clone(torch.as_tensor(self.m)).to(flat_proba.device)\n m_extended = torch.unsqueeze(m, dim=0)\n m_extended = torch.unsqueeze(m_extended, dim=3)\n m_extended = m_extended.expand((flat_proba.size(0), m_extended.size(1), m_extended.size(2), flat_proba.size(2)))\n\n # Expand the feature dimensions of the target\n flat_target_extended = torch.unsqueeze(flat_target, dim=1)\n flat_target_extended = flat_target_extended.expand(\n (flat_target.size(0), m_extended.size(1), flat_target.size(1))\n )\n flat_target_extended = torch.unsqueeze(flat_target_extended, dim=1)\n\n # Extract the vector of class distances for the ground-truth label at each voxel\n m_extended = torch.gather(m_extended, dim=1, index=flat_target_extended)\n m_extended = torch.squeeze(m_extended, dim=1)\n\n # Compute the wasserstein distance map\n wasserstein_map = m_extended * flat_proba\n\n # Sum over the classes\n wasserstein_map = torch.sum(wasserstein_map, dim=1)\n return wasserstein_map", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/aspp.py_from_typing_import_Sequen_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/aspp.py_from_typing_import_Sequen_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/aspp.py", "file_name": "aspp.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 101, "span_ids": ["SimpleASPP", "SimpleASPP.forward", "SimpleASPP.__init__", "docstring"], "tokens": 793}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Sequence\n\nimport torch\nimport torch.nn as nn\n\nfrom monai.networks.blocks.convolutions import Convolution\nfrom monai.networks.layers import same_padding\nfrom monai.networks.layers.factories import Act, Conv, Norm\n\n\nclass SimpleASPP(nn.Module):\n \"\"\"\n A simplified version of the atrous spatial pyramid pooling (ASPP) module.\n\n Chen et al., Encoder-Decoder with Atrous Separable Convolution for Semantic Image Segmentation.\n https://arxiv.org/abs/1802.02611\n\n Wang et al., A 
Noise-robust Framework for Automatic Segmentation of COVID-19 Pneumonia Lesions\n from CT Images. https://ieeexplore.ieee.org/document/9109297\n \"\"\"\n\n def __init__(\n self,\n spatial_dims: int,\n in_channels: int,\n conv_out_channels: int,\n kernel_sizes: Sequence[int] = (1, 3, 3, 3),\n dilations: Sequence[int] = (1, 2, 4, 6),\n norm_type=Norm.BATCH,\n acti_type=Act.LEAKYRELU,\n ) -> None:\n \"\"\"\n Args:\n spatial_dims: number of spatial dimensions, could be 1, 2, or 3.\n in_channels: number of input channels.\n conv_out_channels: number of output channels of each atrous conv.\n The final number of output channels is conv_out_channels * len(kernel_sizes).\n kernel_sizes: a sequence of four convolutional kernel sizes.\n Defaults to (1, 3, 3, 3) for four (dilated) convolutions.\n dilations: a sequence of four convolutional dilation parameters.\n Defaults to (1, 2, 4, 6) for four (dilated) convolutions.\n norm_type: final kernel-size-one convolution normalization type.\n Defaults to batch norm.\n acti_type: final kernel-size-one convolution activation type.\n Defaults to leaky ReLU.\n\n Raises:\n ValueError: When ``kernel_sizes`` length differs from ``dilations``.\n\n See also:\n\n :py:class:`monai.networks.layers.Act`\n :py:class:`monai.networks.layers.Conv`\n :py:class:`monai.networks.layers.Norm`\n\n \"\"\"\n super().__init__()\n if len(kernel_sizes) != len(dilations):\n raise ValueError(\n \"kernel_sizes and dilations length must match, \"\n f\"got kernel_sizes={len(kernel_sizes)} dilations={len(dilations)}.\"\n )\n pads = tuple(same_padding(k, d) for k, d in zip(kernel_sizes, dilations))\n\n self.convs = nn.ModuleList()\n for k, d, p in zip(kernel_sizes, dilations, pads):\n _conv = Conv[Conv.CONV, spatial_dims](\n in_channels=in_channels, out_channels=conv_out_channels, kernel_size=k, dilation=d, padding=p\n )\n self.convs.append(_conv)\n\n out_channels = conv_out_channels * len(pads) # final conv. 
output channels\n self.conv_k1 = Convolution(\n dimensions=spatial_dims,\n in_channels=out_channels,\n out_channels=out_channels,\n kernel_size=1,\n act=acti_type,\n norm=norm_type,\n )\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n x: in shape (batch, channel, spatial_1[, spatial_2, ...]).\n \"\"\"\n x_out = torch.cat([conv(x) for conv in self.convs], dim=1)\n x_out = self.conv_k1(x_out)\n return x_out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/convolutions.py_from_typing_import_Option_Convolution._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/convolutions.py_from_typing_import_Option_Convolution._", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/convolutions.py", "file_name": "convolutions.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 94, "span_ids": ["Convolution", "docstring"], "tokens": 721}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Optional, Sequence, Tuple, Union\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\n\nfrom monai.networks.blocks import ADN\nfrom monai.networks.layers.convutils import same_padding, stride_minus_kernel_padding\nfrom monai.networks.layers.factories import Conv\n\n\nclass Convolution(nn.Sequential):\n \"\"\"\n Constructs a convolution with normalization, optional dropout, and optional activation layers::\n\n -- (Conv|ConvTrans) -- (Norm -- Dropout -- Acti) --\n\n if ``conv_only`` set to ``True``::\n\n -- (Conv|ConvTrans) --\n\n For example:\n\n .. code-block:: python\n\n from monai.networks.blocks import Convolution\n\n conv = Convolution(\n dimensions=3,\n in_channels=1,\n out_channels=1,\n adn_ordering=\"ADN\",\n act=(\"prelu\", {\"init\": 0.2}),\n dropout=0.1,\n norm=(\"layer\", {\"normalized_shape\": (10, 10, 10)}),\n )\n print(conv)\n\n output::\n\n Convolution(\n (conv): Conv3d(1, 1, kernel_size=(3, 3, 3), stride=(1, 1, 1), padding=(1, 1, 1))\n (adn): ADN(\n (A): PReLU(num_parameters=1)\n (D): Dropout(p=0.1, inplace=False)\n (N): LayerNorm((10, 10, 10), eps=1e-05, elementwise_affine=True)\n )\n )\n\n Args:\n dimensions: number of spatial dimensions.\n in_channels: number of input channels.\n out_channels: number of output channels.\n strides: convolution stride. Defaults to 1.\n kernel_size: convolution kernel size. Defaults to 3.\n adn_ordering: a string representing the ordering of activation, normalization, and dropout.\n Defaults to \"NDA\".\n act: activation type and arguments. Defaults to PReLU.\n norm: feature normalization type and arguments. Defaults to instance norm.\n dropout: dropout ratio. Defaults to no dropout.\n dropout_dim: determine the dimensions of dropout. 
Defaults to 1.\n\n - When dropout_dim = 1, randomly zeroes some of the elements for each channel.\n - When dropout_dim = 2, Randomly zeroes out entire channels (a channel is a 2D feature map).\n - When dropout_dim = 3, Randomly zeroes out entire channels (a channel is a 3D feature map).\n\n The value of dropout_dim should be no no larger than the value of `dimensions`.\n dilation: dilation rate. Defaults to 1.\n groups: controls the connections between inputs and outputs. Defaults to 1.\n bias: whether to have a bias term. Defaults to True.\n conv_only: whether to use the convolutional layer only. Defaults to False.\n is_transposed: if True uses ConvTrans instead of Conv. Defaults to False.\n padding: controls the amount of implicit zero-paddings on both sides for padding number of points\n for each dimension. Defaults to None.\n output_padding: controls the additional size added to one side of the output shape.\n Defaults to None.\n\n See also:\n\n :py:class:`monai.networks.layers.Conv`\n :py:class:`monai.networks.blocks.ADN`\n\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/convolutions.py_ResidualUnit_ResidualUnit._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/convolutions.py_ResidualUnit_ResidualUnit._", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/convolutions.py", "file_name": "convolutions.py", "file_type": "text/x-python", "category": "implementation", "start_line": 169, "end_line": 241, "span_ids": ["ResidualUnit"], "tokens": 683}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ResidualUnit(nn.Module):\n \"\"\"\n Residual module with multiple convolutions and a residual connection.\n\n For example:\n\n .. code-block:: python\n\n from monai.networks.blocks import ResidualUnit\n\n convs = ResidualUnit(\n dimensions=3,\n in_channels=1,\n out_channels=1,\n adn_ordering=\"AN\",\n act=(\"prelu\", {\"init\": 0.2}),\n norm=(\"layer\", {\"normalized_shape\": (10, 10, 10)}),\n )\n print(convs)\n\n output::\n\n ResidualUnit(\n (conv): Sequential(\n (unit0): Convolution(\n (conv): Conv3d(1, 1, kernel_size=(3, 3, 3), stride=(1, 1, 1), padding=(1, 1, 1))\n (adn): ADN(\n (A): PReLU(num_parameters=1)\n (N): LayerNorm((10, 10, 10), eps=1e-05, elementwise_affine=True)\n )\n )\n (unit1): Convolution(\n (conv): Conv3d(1, 1, kernel_size=(3, 3, 3), stride=(1, 1, 1), padding=(1, 1, 1))\n (adn): ADN(\n (A): PReLU(num_parameters=1)\n (N): LayerNorm((10, 10, 10), eps=1e-05, elementwise_affine=True)\n )\n )\n )\n (residual): Identity()\n )\n\n Args:\n dimensions: number of spatial dimensions.\n in_channels: number of input channels.\n out_channels: number of output channels.\n strides: convolution stride. Defaults to 1.\n kernel_size: convolution kernel size. Defaults to 3.\n subunits: number of convolutions. 
Defaults to 2.\n adn_ordering: a string representing the ordering of activation, normalization, and dropout.\n Defaults to \"NDA\".\n act: activation type and arguments. Defaults to PReLU.\n norm: feature normalization type and arguments. Defaults to instance norm.\n dropout: dropout ratio. Defaults to no dropout.\n dropout_dim: determine the dimensions of dropout. Defaults to 1.\n\n - When dropout_dim = 1, randomly zeroes some of the elements for each channel.\n - When dropout_dim = 2, Randomly zero out entire channels (a channel is a 2D feature map).\n - When dropout_dim = 3, Randomly zero out entire channels (a channel is a 3D feature map).\n\n The value of dropout_dim should be no no larger than the value of `dimensions`.\n dilation: dilation rate. Defaults to 1.\n bias: whether to have a bias term. Defaults to True.\n last_conv_only: for the last subunit, whether to use the convolutional layer only.\n Defaults to False.\n padding: controls the amount of implicit zero-paddings on both sides for padding number of points\n for each dimension. Defaults to None.\n\n See also:\n\n :py:class:`monai.networks.blocks.Convolution`\n\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/convolutions.py_ResidualUnit.__init___": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/convolutions.py_ResidualUnit.__init___", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/convolutions.py", "file_name": "convolutions.py", "file_type": "text/x-python", "category": "implementation", "start_line": 243, "end_line": 314, "span_ids": ["ResidualUnit.forward", "ResidualUnit.__init__"], "tokens": 634}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ResidualUnit(nn.Module):\n\n def __init__(\n self,\n dimensions: int,\n in_channels: int,\n out_channels: int,\n strides: Union[Sequence[int], int] = 1,\n kernel_size: Union[Sequence[int], int] = 3,\n subunits: int = 2,\n adn_ordering: str = \"NDA\",\n act: Optional[Union[Tuple, str]] = \"PRELU\",\n norm: Optional[Union[Tuple, str]] = \"INSTANCE\",\n dropout: Optional[Union[Tuple, str, float]] = None,\n dropout_dim: Optional[int] = 1,\n dilation: Union[Sequence[int], int] = 1,\n bias: bool = True,\n last_conv_only: bool = False,\n padding: Optional[Union[Sequence[int], int]] = None,\n ) -> None:\n super().__init__()\n self.dimensions = dimensions\n self.in_channels = in_channels\n self.out_channels = out_channels\n self.conv = nn.Sequential()\n self.residual = nn.Identity()\n if not padding:\n padding = same_padding(kernel_size, dilation)\n schannels = in_channels\n sstrides = strides\n subunits = max(1, subunits)\n\n for su in range(subunits):\n conv_only = last_conv_only and su == (subunits - 1)\n unit = Convolution(\n dimensions,\n schannels,\n out_channels,\n strides=sstrides,\n kernel_size=kernel_size,\n adn_ordering=adn_ordering,\n act=act,\n norm=norm,\n dropout=dropout,\n dropout_dim=dropout_dim,\n 
dilation=dilation,\n bias=bias,\n conv_only=conv_only,\n padding=padding,\n )\n\n self.conv.add_module(f\"unit{su:d}\", unit)\n\n # after first loop set channels and strides to what they should be for subsequent units\n schannels = out_channels\n sstrides = 1\n\n # apply convolution to input to change number of output channels and size to match that coming from self.conv\n if np.prod(strides) != 1 or in_channels != out_channels:\n rkernel_size = kernel_size\n rpadding = padding\n\n if np.prod(strides) == 1: # if only adapting number of channels a 1x1 kernel is used with no padding\n rkernel_size = 1\n rpadding = 0\n\n conv_type = Conv[Conv.CONV, dimensions]\n self.residual = conv_type(in_channels, out_channels, rkernel_size, strides, rpadding, bias=bias)\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n res: torch.Tensor = self.residual(x) # create the additive residual from x\n cx: torch.Tensor = self.conv(x) # apply x to sequence of operations\n return cx + res # add the residual to the output", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_from_typing_import_Type_GCN.forward.return.x": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_from_typing_import_Type_GCN.forward.return.x", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/fcn.py", "file_name": "fcn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 57, "span_ids": ["GCN", "GCN.forward", "GCN.__init__", "docstring"], "tokens": 460}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Type\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom monai.networks.blocks.convolutions import Convolution\nfrom monai.networks.blocks.upsample import UpSample\nfrom monai.networks.layers.factories import Act, Conv, Norm\nfrom monai.utils import optional_import\n\nmodels, _ = optional_import(\"torchvision\", name=\"models\")\n\n\nclass GCN(nn.Module):\n \"\"\"\n The Global Convolutional Network module using large 1D\n Kx1 and 1xK kernels to represent 2D kernels.\n \"\"\"\n\n def __init__(self, inplanes: int, planes: int, ks: int = 7):\n \"\"\"\n Args:\n inplanes: number of input channels.\n planes: number of output channels.\n ks: kernel size for one dimension. 
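Illustrative sketch (not part of the indexed source): the `ResidualUnit.__init__` above swaps the identity residual for a (strided) convolution whenever the stride product is not 1 or the channel count changes, so the two branches always have matching shapes before they are added.

.. code-block:: python

    import torch
    from monai.networks.blocks import ResidualUnit

    # channels and spatial size change -> residual becomes a strided conv
    down = ResidualUnit(dimensions=2, in_channels=8, out_channels=16, strides=2, subunits=2)
    # same channels, stride 1 -> residual stays nn.Identity
    same = ResidualUnit(dimensions=2, in_channels=8, out_channels=8, strides=1, subunits=2)

    x = torch.randn(1, 8, 32, 32)
    print(down(x).shape)  # expected: torch.Size([1, 16, 16, 16])
    print(same(x).shape)  # expected: torch.Size([1, 8, 32, 32])
    print(type(down.residual).__name__, type(same.residual).__name__)  # Conv2d Identity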
Defaults to 7.\n \"\"\"\n super(GCN, self).__init__()\n\n conv2d_type: Type[nn.Conv2d] = Conv[Conv.CONV, 2]\n self.conv_l1 = conv2d_type(in_channels=inplanes, out_channels=planes, kernel_size=(ks, 1), padding=(ks // 2, 0))\n self.conv_l2 = conv2d_type(in_channels=planes, out_channels=planes, kernel_size=(1, ks), padding=(0, ks // 2))\n self.conv_r1 = conv2d_type(in_channels=inplanes, out_channels=planes, kernel_size=(1, ks), padding=(0, ks // 2))\n self.conv_r2 = conv2d_type(in_channels=planes, out_channels=planes, kernel_size=(ks, 1), padding=(ks // 2, 0))\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n x: in shape (batch, inplanes, spatial_1, spatial_2).\n \"\"\"\n x_l = self.conv_l1(x)\n x_l = self.conv_l2(x_l)\n x_r = self.conv_r1(x)\n x_r = self.conv_r2(x_r)\n x = x_l + x_r\n return x", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_Refine_Refine.forward.return.out": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_Refine_Refine.forward.return.out", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/fcn.py", "file_name": "fcn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 60, "end_line": 95, "span_ids": ["Refine", "Refine.__init__", "Refine.forward"], "tokens": 297}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Refine(nn.Module):\n \"\"\"\n Simple residual block to refine the details of the activation maps.\n \"\"\"\n\n def __init__(self, planes: int):\n \"\"\"\n Args:\n planes: number of input channels.\n \"\"\"\n super(Refine, self).__init__()\n\n relu_type: Type[nn.ReLU] = Act[Act.RELU]\n conv2d_type: Type[nn.Conv2d] = Conv[Conv.CONV, 2]\n norm2d_type: Type[nn.BatchNorm2d] = Norm[Norm.BATCH, 2]\n\n self.bn = norm2d_type(num_features=planes)\n self.relu = relu_type(inplace=True)\n self.conv1 = conv2d_type(in_channels=planes, out_channels=planes, kernel_size=3, padding=1)\n self.conv2 = conv2d_type(in_channels=planes, out_channels=planes, kernel_size=3, padding=1)\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n x: in shape (batch, planes, spatial_1, spatial_2).\n \"\"\"\n residual = x\n x = self.bn(x)\n x = self.relu(x)\n x = self.conv1(x)\n x = self.bn(x)\n x = self.relu(x)\n x = self.conv2(x)\n\n out = residual + x\n return out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_FCN_FCN.__init__.if_self_upsample_mode_.self.up_conv.UpSample_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_FCN_FCN.__init__.if_self_upsample_mode_.self.up_conv.UpSample_", "embedding": 
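Illustrative sketch (not part of the indexed source): both GCN branches pad by `ks // 2`, so the spatial size is preserved and only the channel count changes; `Refine` then adds a residual refinement at the same resolution.

.. code-block:: python

    import torch
    from monai.networks.blocks.fcn import GCN, Refine

    gcn = GCN(inplanes=256, planes=4, ks=7)  # summed Kx1 and 1xK branches
    refine = Refine(planes=4)                # BN/ReLU/conv residual block

    x = torch.randn(1, 256, 32, 32)
    print(refine(gcn(x)).shape)  # expected: torch.Size([1, 4, 32, 32])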
null, "metadata": {"file_path": "monai/networks/blocks/fcn.py", "file_name": "fcn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 98, "end_line": 163, "span_ids": ["FCN.__init__", "FCN"], "tokens": 662}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FCN(nn.Module):\n \"\"\"\n 2D FCN network with 3 input channels. The small decoder is built\n with the GCN and Refine modules.\n The code is adapted from `lsqshr's official 2D code `_.\n\n Args:\n out_channels: number of output channels. Defaults to 1.\n upsample_mode: [``\"transpose\"``, ``\"bilinear\"``]\n The mode of upsampling manipulations.\n Using the second mode cannot guarantee the model's reproducibility. Defaults to ``bilinear``.\n\n - ``transpose``, uses transposed convolution layers.\n - ``bilinear``, uses bilinear interpolation.\n\n pretrained: If True, returns a model pre-trained on ImageNet\n progress: If True, displays a progress bar of the download to stderr.\n \"\"\"\n\n def __init__(\n self, out_channels: int = 1, upsample_mode: str = \"bilinear\", pretrained: bool = True, progress: bool = True\n ):\n super(FCN, self).__init__()\n\n conv2d_type: Type[nn.Conv2d] = Conv[Conv.CONV, 2]\n\n self.upsample_mode = upsample_mode\n self.conv2d_type = conv2d_type\n self.out_channels = out_channels\n resnet = models.resnet50(pretrained=pretrained, progress=progress)\n\n self.conv1 = resnet.conv1\n self.bn0 = resnet.bn1\n self.relu = resnet.relu\n self.maxpool = resnet.maxpool\n\n self.layer1 = resnet.layer1\n self.layer2 = resnet.layer2\n self.layer3 = resnet.layer3\n self.layer4 = resnet.layer4\n\n self.gcn1 = GCN(2048, self.out_channels)\n self.gcn2 = GCN(1024, self.out_channels)\n self.gcn3 = GCN(512, self.out_channels)\n self.gcn4 = GCN(64, self.out_channels)\n self.gcn5 = GCN(64, self.out_channels)\n\n self.refine1 = Refine(self.out_channels)\n self.refine2 = Refine(self.out_channels)\n self.refine3 = Refine(self.out_channels)\n self.refine4 = Refine(self.out_channels)\n self.refine5 = Refine(self.out_channels)\n self.refine6 = Refine(self.out_channels)\n self.refine7 = Refine(self.out_channels)\n self.refine8 = Refine(self.out_channels)\n self.refine9 = Refine(self.out_channels)\n self.refine10 = Refine(self.out_channels)\n self.transformer = self.conv2d_type(in_channels=256, out_channels=64, kernel_size=1)\n\n if self.upsample_mode == \"transpose\":\n self.up_conv = UpSample(\n dimensions=2,\n in_channels=self.out_channels,\n scale_factor=2,\n mode=\"deconv\",\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_FCN.forward_FCN.forward.return.out": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_FCN.forward_FCN.forward.return.out", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/fcn.py", "file_name": "fcn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 164, "end_line": 206, "span_ids": 
["FCN.forward"], "tokens": 512}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FCN(nn.Module):\n\n def forward(self, x: torch.Tensor):\n \"\"\"\n Args:\n x: in shape (batch, 3, spatial_1, spatial_2).\n \"\"\"\n org_input = x\n x = self.conv1(x)\n x = self.bn0(x)\n x = self.relu(x)\n conv_x = x\n x = self.maxpool(x)\n pool_x = x\n\n fm1 = self.layer1(x)\n fm2 = self.layer2(fm1)\n fm3 = self.layer3(fm2)\n fm4 = self.layer4(fm3)\n\n gcfm1 = self.refine1(self.gcn1(fm4))\n gcfm2 = self.refine2(self.gcn2(fm3))\n gcfm3 = self.refine3(self.gcn3(fm2))\n gcfm4 = self.refine4(self.gcn4(pool_x))\n gcfm5 = self.refine5(self.gcn5(conv_x))\n\n if self.upsample_mode == \"transpose\":\n fs1 = self.refine6(self.up_conv(gcfm1) + gcfm2)\n fs2 = self.refine7(self.up_conv(fs1) + gcfm3)\n fs3 = self.refine8(self.up_conv(fs2) + gcfm4)\n fs4 = self.refine9(self.up_conv(fs3) + gcfm5)\n out = self.refine10(self.up_conv(fs4))\n else:\n fs1 = self.refine6(\n F.interpolate(gcfm1, fm3.size()[2:], mode=self.upsample_mode, align_corners=True) + gcfm2\n )\n fs2 = self.refine7(F.interpolate(fs1, fm2.size()[2:], mode=self.upsample_mode, align_corners=True) + gcfm3)\n fs3 = self.refine8(\n F.interpolate(fs2, pool_x.size()[2:], mode=self.upsample_mode, align_corners=True) + gcfm4\n )\n fs4 = self.refine9(\n F.interpolate(fs3, conv_x.size()[2:], mode=self.upsample_mode, align_corners=True) + gcfm5\n )\n out = self.refine10(F.interpolate(fs4, org_input.size()[2:], mode=self.upsample_mode, align_corners=True))\n return out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_MCFCN_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/fcn.py_MCFCN_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/fcn.py", "file_name": "fcn.py", "file_type": "text/x-python", "category": "implementation", "start_line": 210, "end_line": 258, "span_ids": ["MCFCN.forward", "MCFCN.__init__", "MCFCN"], "tokens": 375}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MCFCN(FCN):\n \"\"\"\n The multi-channel version of the 2D FCN module.\n Adds a projection layer to take arbitrary number of inputs.\n\n Args:\n in_channels: number of input channels. Defaults to 3.\n out_channels: number of output channels. Defaults to 1.\n upsample_mode: [``\"transpose\"``, ``\"bilinear\"``]\n The mode of upsampling manipulations.\n Using the second mode cannot guarantee the model's reproducibility. 
Defaults to ``bilinear``.\n\n - ``transpose``, uses transposed convolution layers.\n - ``bilinear``, uses bilinear interpolate.\n pretrained: If True, returns a model pre-trained on ImageNet\n progress: If True, displays a progress bar of the download to stderr.\n \"\"\"\n\n def __init__(\n self,\n in_channels: int = 3,\n out_channels: int = 1,\n upsample_mode: str = \"bilinear\",\n pretrained: bool = True,\n progress: bool = True,\n ):\n super(MCFCN, self).__init__(\n out_channels=out_channels, upsample_mode=upsample_mode, pretrained=pretrained, progress=progress\n )\n\n self.init_proj = Convolution(\n dimensions=2,\n in_channels=in_channels,\n out_channels=3,\n kernel_size=1,\n act=(\"relu\", {\"inplace\": True}),\n norm=Norm.BATCH,\n bias=False,\n )\n\n def forward(self, x: torch.Tensor):\n \"\"\"\n Args:\n x: in shape (batch, in_channels, spatial_1, spatial_2).\n \"\"\"\n x = self.init_proj(x)\n out = super(MCFCN, self).forward(x)\n return out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/segresnet_block.py_ResBlock_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/segresnet_block.py_ResBlock_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/segresnet_block.py", "file_name": "segresnet_block.py", "file_type": "text/x-python", "category": "implementation", "start_line": 65, "end_line": 119, "span_ids": ["ResBlock.__init__", "ResBlock.forward", "ResBlock"], "tokens": 444}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ResBlock(nn.Module):\n \"\"\"\n ResBlock employs skip connection and two convolution blocks and is used\n in SegResNet based on `3D MRI brain tumor segmentation using autoencoder regularization\n `_.\n \"\"\"\n\n def __init__(\n self,\n spatial_dims: int,\n in_channels: int,\n kernel_size: int = 3,\n norm_name: str = \"group\",\n num_groups: int = 8,\n ) -> None:\n \"\"\"\n Args:\n spatial_dims: number of spatial dimensions, could be 1, 2 or 3.\n in_channels: number of input channels.\n kernel_size: convolution kernel size, the value should be an odd number. Defaults to 3.\n norm_name: feature normalization type, this module only supports group norm,\n batch norm and instance norm. Defaults to ``group``.\n num_groups: number of groups to separate the channels into, in this module,\n in_channels should be divisible by num_groups. 
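Illustrative sketch (not part of the indexed source): MCFCN first projects an arbitrary number of input channels down to 3 and then runs the FCN decoder, whose final interpolation restores the input resolution. torchvision is required for the ResNet-50 backbone; `pretrained=False` is used here only to avoid the ImageNet download (a torchvision version that still accepts the `pretrained` flag is assumed).

.. code-block:: python

    import torch
    from monai.networks.blocks.fcn import MCFCN

    net = MCFCN(in_channels=5, out_channels=2, upsample_mode="bilinear", pretrained=False)
    seg = net(torch.randn(1, 5, 64, 64))
    print(seg.shape)  # expected: torch.Size([1, 2, 64, 64])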
Defaults to 8.\n \"\"\"\n\n super().__init__()\n\n if kernel_size % 2 != 1:\n raise AssertionError(\"kernel_size should be an odd number.\")\n if in_channels % num_groups != 0:\n raise AssertionError(\"in_channels should be divisible by num_groups.\")\n\n self.norm1 = get_norm_layer(spatial_dims, in_channels, norm_name, num_groups=num_groups)\n self.norm2 = get_norm_layer(spatial_dims, in_channels, norm_name, num_groups=num_groups)\n self.relu = Act[Act.RELU](inplace=True)\n self.conv1 = get_conv_layer(spatial_dims, in_channels, in_channels)\n self.conv2 = get_conv_layer(spatial_dims, in_channels, in_channels)\n\n def forward(self, x):\n\n identity = x\n\n x = self.norm1(x)\n x = self.relu(x)\n x = self.conv1(x)\n\n x = self.norm2(x)\n x = self.relu(x)\n x = self.conv2(x)\n\n x += identity\n\n return x", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_SEBottleneck_SEBottleneck.__init__.super_SEBottleneck_self_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_SEBottleneck_SEBottleneck.__init__.super_SEBottleneck_self_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/squeeze_and_excitation.py", "file_name": "squeeze_and_excitation.py", "file_type": "text/x-python", "category": "implementation", "start_line": 224, "end_line": 270, "span_ids": ["SEBottleneck.__init__", "SEBottleneck"], "tokens": 342}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SEBottleneck(SEBlock):\n \"\"\"\n Bottleneck for SENet154.\n \"\"\"\n\n expansion = 4\n\n def __init__(\n self,\n spatial_dims: int,\n inplanes: int,\n planes: int,\n groups: int,\n reduction: int,\n stride: int = 1,\n downsample: Optional[Convolution] = None,\n ) -> None:\n\n conv_param_1 = {\n \"strides\": 1,\n \"kernel_size\": 1,\n \"act\": (\"relu\", {\"inplace\": True}),\n \"norm\": Norm.BATCH,\n \"bias\": False,\n }\n conv_param_2 = {\n \"strides\": stride,\n \"kernel_size\": 3,\n \"act\": (\"relu\", {\"inplace\": True}),\n \"norm\": Norm.BATCH,\n \"groups\": groups,\n \"bias\": False,\n }\n conv_param_3 = {\"strides\": 1, \"kernel_size\": 1, \"act\": None, \"norm\": Norm.BATCH, \"bias\": False}\n\n super(SEBottleneck, self).__init__(\n spatial_dims=spatial_dims,\n in_channels=inplanes,\n n_chns_1=planes * 2,\n n_chns_2=planes * 4,\n n_chns_3=planes * 4,\n conv_param_1=conv_param_1,\n conv_param_2=conv_param_2,\n conv_param_3=conv_param_3,\n project=downsample,\n r=reduction,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_SEResNetBottleneck_SEResNetBottleneck.__init__.super_SEResNetBottleneck_": {"__data__": {"id_": 
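Illustrative sketch (not part of the indexed source): ResBlock preserves both the channel count and the spatial size, so the skip addition is always valid; with group norm, `in_channels` must be divisible by `num_groups`.

.. code-block:: python

    import torch
    from monai.networks.blocks.segresnet_block import ResBlock

    block = ResBlock(spatial_dims=3, in_channels=8, kernel_size=3, norm_name="group", num_groups=4)
    x = torch.randn(1, 8, 16, 16, 16)
    print(block(x).shape)  # expected: torch.Size([1, 8, 16, 16, 16])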
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_SEResNetBottleneck_SEResNetBottleneck.__init__.super_SEResNetBottleneck_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/squeeze_and_excitation.py", "file_name": "squeeze_and_excitation.py", "file_type": "text/x-python", "category": "implementation", "start_line": 273, "end_line": 321, "span_ids": ["SEResNetBottleneck", "SEResNetBottleneck.__init__"], "tokens": 388}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SEResNetBottleneck(SEBlock):\n \"\"\"\n ResNet bottleneck with a Squeeze-and-Excitation module. It follows Caffe\n implementation and uses `strides=stride` in `conv1` and not in `conv2`\n (the latter is used in the torchvision implementation of ResNet).\n \"\"\"\n\n expansion = 4\n\n def __init__(\n self,\n spatial_dims: int,\n inplanes: int,\n planes: int,\n groups: int,\n reduction: int,\n stride: int = 1,\n downsample: Optional[Convolution] = None,\n ) -> None:\n\n conv_param_1 = {\n \"strides\": stride,\n \"kernel_size\": 1,\n \"act\": (\"relu\", {\"inplace\": True}),\n \"norm\": Norm.BATCH,\n \"bias\": False,\n }\n conv_param_2 = {\n \"strides\": 1,\n \"kernel_size\": 3,\n \"act\": (\"relu\", {\"inplace\": True}),\n \"norm\": Norm.BATCH,\n \"groups\": groups,\n \"bias\": False,\n }\n conv_param_3 = {\"strides\": 1, \"kernel_size\": 1, \"act\": None, \"norm\": Norm.BATCH, \"bias\": False}\n\n super(SEResNetBottleneck, self).__init__(\n spatial_dims=spatial_dims,\n in_channels=inplanes,\n n_chns_1=planes,\n n_chns_2=planes,\n n_chns_3=planes * 4,\n conv_param_1=conv_param_1,\n conv_param_2=conv_param_2,\n conv_param_3=conv_param_3,\n project=downsample,\n r=reduction,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_SEResNeXtBottleneck_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/squeeze_and_excitation.py_SEResNeXtBottleneck_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/squeeze_and_excitation.py", "file_name": "squeeze_and_excitation.py", "file_type": "text/x-python", "category": "implementation", "start_line": 332, "end_line": 381, "span_ids": ["SEResNeXtBottleneck.__init__", "SEResNeXtBottleneck"], "tokens": 371}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SEResNeXtBottleneck(SEBlock):\n \"\"\"\n ResNeXt bottleneck type C with a Squeeze-and-Excitation module.\n \"\"\"\n\n expansion = 4\n\n def __init__(\n self,\n spatial_dims: int,\n inplanes: int,\n planes: int,\n groups: int,\n reduction: int,\n stride: int = 1,\n downsample: 
Optional[Convolution] = None,\n base_width: int = 4,\n ) -> None:\n\n conv_param_1 = {\n \"strides\": 1,\n \"kernel_size\": 1,\n \"act\": (\"relu\", {\"inplace\": True}),\n \"norm\": Norm.BATCH,\n \"bias\": False,\n }\n conv_param_2 = {\n \"strides\": stride,\n \"kernel_size\": 3,\n \"act\": (\"relu\", {\"inplace\": True}),\n \"norm\": Norm.BATCH,\n \"groups\": groups,\n \"bias\": False,\n }\n conv_param_3 = {\"strides\": 1, \"kernel_size\": 1, \"act\": None, \"norm\": Norm.BATCH, \"bias\": False}\n width = math.floor(planes * (base_width / 64)) * groups\n\n super().__init__(\n spatial_dims=spatial_dims,\n in_channels=inplanes,\n n_chns_1=width,\n n_chns_2=width,\n n_chns_3=planes * 4,\n conv_param_1=conv_param_1,\n conv_param_2=conv_param_2,\n conv_param_3=conv_param_3,\n project=downsample,\n r=reduction,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_calculate_out_shape_calculate_out_shape.return.out_shape_if_len_out_shap": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_calculate_out_shape_calculate_out_shape.return.out_shape_if_len_out_shap", "embedding": null, "metadata": {"file_path": "monai/networks/layers/convutils.py", "file_name": "convutils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 59, "end_line": 78, "span_ids": ["calculate_out_shape"], "tokens": 243}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def calculate_out_shape(\n in_shape: Union[Sequence[int], int, np.ndarray],\n kernel_size: Union[Sequence[int], int],\n stride: Union[Sequence[int], int],\n padding: Union[Sequence[int], int],\n) -> Union[Tuple[int, ...], int]:\n \"\"\"\n Calculate the output tensor shape when applying a convolution to a tensor of shape `inShape` with kernel size\n `kernel_size`, stride value `stride`, and input padding value `padding`. 
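Worked example (not part of the indexed source) of the bottleneck width computed in `SEResNeXtBottleneck.__init__` above.

.. code-block:: python

    import math

    planes, base_width, groups = 128, 4, 32
    width = math.floor(planes * (base_width / 64)) * groups
    print(width)  # 256 -> 8 channels per group in the 3x3 grouped convolution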
All arguments can be scalars or multiple\n values, return value is a scalar if all inputs are scalars.\n \"\"\"\n in_shape_np = np.atleast_1d(in_shape)\n kernel_size_np = np.atleast_1d(kernel_size)\n stride_np = np.atleast_1d(stride)\n padding_np = np.atleast_1d(padding)\n\n out_shape_np = ((in_shape_np - kernel_size_np + padding_np + padding_np) // stride_np) + 1\n out_shape = tuple(int(s) for s in out_shape_np)\n\n return out_shape if len(out_shape) > 1 else out_shape[0]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_LLTMFunction_LLTMFunction.backward.return.d_input_d_weights_d_bia": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_LLTMFunction_LLTMFunction.backward.return.d_input_d_weights_d_bia", "embedding": null, "metadata": {"file_path": "monai/networks/layers/simplelayers.py", "file_name": "simplelayers.py", "file_type": "text/x-python", "category": "implementation", "start_line": 161, "end_line": 176, "span_ids": ["LLTMFunction.forward", "LLTMFunction", "LLTMFunction.backward"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LLTMFunction(Function):\n @staticmethod\n def forward(ctx, input, weights, bias, old_h, old_cell):\n outputs = _C.lltm_forward(input, weights, bias, old_h, old_cell)\n new_h, new_cell = outputs[:2]\n variables = outputs[1:] + [weights]\n ctx.save_for_backward(*variables)\n\n return new_h, new_cell\n\n @staticmethod\n def backward(ctx, grad_h, grad_cell):\n outputs = _C.lltm_backward(grad_h.contiguous(), grad_cell.contiguous(), *ctx.saved_tensors)\n d_old_h, d_input, d_weights, d_bias, d_old_cell = outputs[:5]\n\n return d_input, d_weights, d_bias, d_old_h, d_old_cell", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_LLTM_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_LLTM_", "embedding": null, "metadata": {"file_path": "monai/networks/layers/simplelayers.py", "file_name": "simplelayers.py", "file_type": "text/x-python", "category": "implementation", "start_line": 146, "end_line": 176, "span_ids": ["LLTM.forward", "LLTM", "LLTM.__init__", "LLTM.reset_parameters"], "tokens": 290}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LLTM(nn.Module):\n \"\"\"\n This recurrent unit is similar to an LSTM, but 
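Worked example (not part of the indexed source) of the output-shape formula `((in - k + 2p) // s) + 1` implemented by `calculate_out_shape` above.

.. code-block:: python

    from monai.networks.layers.convutils import calculate_out_shape

    # scalar inputs give a scalar: ((64 - 3 + 2 * 1) // 2) + 1 = 32
    print(calculate_out_shape(64, kernel_size=3, stride=2, padding=1))  # 32
    # per-dimension inputs give a tuple
    print(calculate_out_shape((64, 96), kernel_size=3, stride=(2, 1), padding=1))  # (32, 96)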
differs in that it lacks a forget\n gate and uses an Exponential Linear Unit (ELU) as its internal activation function.\n Because this unit never forgets, call it LLTM, or Long-Long-Term-Memory unit.\n It has both C++ and CUDA implementation, automatically switch according to the\n target device where put this module to.\n\n Args:\n input_features: size of input feature data\n state_size: size of the state of recurrent unit\n\n Referring to: https://pytorch.org/tutorials/advanced/cpp_extension.html\n \"\"\"\n\n def __init__(self, input_features: int, state_size: int):\n super(LLTM, self).__init__()\n self.input_features = input_features\n self.state_size = state_size\n self.weights = nn.Parameter(torch.empty(3 * state_size, input_features + state_size))\n self.bias = nn.Parameter(torch.empty(1, 3 * state_size))\n self.reset_parameters()\n\n def reset_parameters(self):\n stdv = 1.0 / math.sqrt(self.state_size)\n for weight in self.parameters():\n weight.data.uniform_(-stdv, +stdv)\n\n def forward(self, input, state):\n return LLTMFunction.apply(input, self.weights, self.bias, *state)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_math_Bottleneck3x3x1.__init__.self.pool.pool_type_kernel_size_1_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_math_Bottleneck3x3x1.__init__.self.pool.pool_type_kernel_size_1_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 59, "span_ids": ["Bottleneck3x3x1.__init__", "Bottleneck3x3x1", "docstring"], "tokens": 425}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import math\nfrom typing import Optional, Sequence, Type, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom monai.networks.blocks.fcn import FCN\nfrom monai.networks.layers.factories import Act, Conv, Norm, Pool\n\n\nclass Bottleneck3x3x1(nn.Module):\n\n expansion = 4\n\n def __init__(\n self,\n spatial_dims: int,\n inplanes: int,\n planes: int,\n stride: Union[Sequence[int], int] = 1,\n downsample: Optional[nn.Sequential] = None,\n ) -> None:\n\n super(Bottleneck3x3x1, self).__init__()\n\n conv_type = Conv[Conv.CONV, spatial_dims]\n norm_type: Type[Union[nn.BatchNorm2d, nn.BatchNorm3d]] = Norm[Norm.BATCH, spatial_dims]\n pool_type: Type[Union[nn.MaxPool2d, nn.MaxPool3d]] = Pool[Pool.MAX, spatial_dims]\n relu_type: Type[nn.ReLU] = Act[Act.RELU]\n\n self.conv1 = conv_type(inplanes, planes, kernel_size=1, bias=False)\n self.bn1 = norm_type(planes)\n self.conv2 = conv_type(\n planes,\n planes,\n kernel_size=(3, 3, 1)[-spatial_dims:],\n stride=stride,\n padding=(1, 1, 0)[-spatial_dims:],\n bias=False,\n )\n self.bn2 = norm_type(planes)\n self.conv3 = conv_type(planes, planes * 4, kernel_size=1, bias=False)\n self.bn3 = norm_type(planes * 4)\n self.relu = relu_type(inplace=True)\n 
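Illustrative sketch (not part of the indexed source): constructing LLTM only allocates the gate parameters, whose shapes follow directly from `__init__` above; running `forward` additionally requires the compiled C++/CUDA extension.

.. code-block:: python

    from monai.networks.layers.simplelayers import LLTM

    cell = LLTM(input_features=32, state_size=128)
    # three gates stacked: (3 * state_size, input_features + state_size)
    print(tuple(cell.weights.shape), tuple(cell.bias.shape))  # (384, 160) (1, 384)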
self.downsample = downsample\n self.stride = stride\n self.pool = pool_type(kernel_size=(1, 1, 2)[-spatial_dims:], stride=(1, 1, 2)[-spatial_dims:])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Bottleneck3x3x1.forward_Bottleneck3x3x1.forward.return.out": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Bottleneck3x3x1.forward_Bottleneck3x3x1.forward.return.out", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 60, "end_line": 82, "span_ids": ["Bottleneck3x3x1.forward"], "tokens": 133}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Bottleneck3x3x1(nn.Module):\n\n def forward(self, x):\n residual = x\n\n out = self.conv1(x)\n out = self.bn1(out)\n out = self.relu(out)\n\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu(out)\n\n out = self.conv3(out)\n out = self.bn3(out)\n\n if self.downsample is not None:\n residual = self.downsample(x)\n if out.size() != residual.size():\n out = self.pool(out)\n\n out += residual\n out = self.relu(out)\n\n return out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Projection_Projection.__init__.self_add_module_conv_c": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Projection_Projection.__init__.self_add_module_conv_c", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 85, "end_line": 95, "span_ids": ["Projection.__init__", "Projection"], "tokens": 149}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Projection(nn.Sequential):\n def __init__(self, spatial_dims: int, num_input_features: int, num_output_features: int):\n super(Projection, self).__init__()\n\n conv_type = Conv[Conv.CONV, spatial_dims]\n norm_type: Type[Union[nn.BatchNorm2d, nn.BatchNorm3d]] = Norm[Norm.BATCH, spatial_dims]\n relu_type: Type[nn.ReLU] = Act[Act.RELU]\n\n self.add_module(\"norm\", norm_type(num_input_features))\n self.add_module(\"relu\", relu_type(inplace=True))\n self.add_module(\"conv\", conv_type(num_input_features, num_output_features, kernel_size=1, stride=1, bias=False))", "start_char_idx": null, 
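Worked example (not part of the indexed source): the `(3, 3, 1)[-spatial_dims:]` slicing used in `Bottleneck3x3x1` (and elsewhere in ahnet.py) takes the trailing `spatial_dims` entries, so one tuple serves both the 2D and 3D variants of the anisotropic kernels.

.. code-block:: python

    for spatial_dims in (2, 3):
        kernel = (3, 3, 1)[-spatial_dims:]
        padding = (1, 1, 0)[-spatial_dims:]
        print(spatial_dims, kernel, padding)
    # 2 (3, 1) (1, 0)
    # 3 (3, 3, 1) (1, 1, 0)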
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_DenseBlock_DenseBlock.__init__.for_i_in_range_num_layers.self_add_module_denselay": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_DenseBlock_DenseBlock.__init__.for_i_in_range_num_layers.self_add_module_denselay", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 98, "end_line": 113, "span_ids": ["DenseBlock.__init__", "DenseBlock"], "tokens": 122}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DenseBlock(nn.Sequential):\n def __init__(\n self,\n spatial_dims: int,\n num_layers: int,\n num_input_features: int,\n bn_size: int,\n growth_rate: int,\n dropout_prob: float,\n ):\n super(DenseBlock, self).__init__()\n for i in range(num_layers):\n layer = Pseudo3DLayer(\n spatial_dims, num_input_features + i * growth_rate, growth_rate, bn_size, dropout_prob\n )\n self.add_module(\"denselayer%d\" % (i + 1), layer)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_UpTransition_UpTransition.__init__.if_upsample_mode_tran.else_.self_add_module_up_nn_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_UpTransition_UpTransition.__init__.if_upsample_mode_tran.else_.self_add_module_up_nn_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 117, "end_line": 139, "span_ids": ["UpTransition.__init__", "UpTransition"], "tokens": 283}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class UpTransition(nn.Sequential):\n def __init__(\n self, spatial_dims: int, num_input_features: int, num_output_features: int, upsample_mode: str = \"transpose\"\n ):\n super(UpTransition, self).__init__()\n\n conv_type = Conv[Conv.CONV, spatial_dims]\n norm_type: Type[Union[nn.BatchNorm2d, nn.BatchNorm3d]] = Norm[Norm.BATCH, spatial_dims]\n relu_type: Type[nn.ReLU] = Act[Act.RELU]\n\n self.add_module(\"norm\", norm_type(num_input_features))\n self.add_module(\"relu\", relu_type(inplace=True))\n self.add_module(\"conv\", conv_type(num_input_features, num_output_features, kernel_size=1, stride=1, bias=False))\n if upsample_mode == \"transpose\":\n conv_trans_type = 
Conv[Conv.CONVTRANS, spatial_dims]\n self.add_module(\n \"up\", conv_trans_type(num_output_features, num_output_features, kernel_size=2, stride=2, bias=False)\n )\n else:\n align_corners: Optional[bool] = None\n if upsample_mode in [\"trilinear\", \"bilinear\"]:\n align_corners = True\n self.add_module(\"up\", nn.Upsample(scale_factor=2, mode=upsample_mode, align_corners=align_corners))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Final_Final.__init__.if_upsample_mode_tran.else_.self_add_module_up_nn_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Final_Final.__init__.if_upsample_mode_tran.else_.self_add_module_up_nn_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 142, "end_line": 174, "span_ids": ["Final.__init__", "Final"], "tokens": 321}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Final(nn.Sequential):\n def __init__(\n self, spatial_dims: int, num_input_features: int, num_output_features: int, upsample_mode: str = \"transpose\"\n ):\n super(Final, self).__init__()\n\n conv_type = Conv[Conv.CONV, spatial_dims]\n norm_type: Type[Union[nn.BatchNorm2d, nn.BatchNorm3d]] = Norm[Norm.BATCH, spatial_dims]\n relu_type: Type[nn.ReLU] = Act[Act.RELU]\n\n self.add_module(\"norm\", norm_type(num_input_features))\n self.add_module(\"relu\", relu_type(inplace=True))\n self.add_module(\n \"conv\",\n conv_type(\n num_input_features,\n num_output_features,\n kernel_size=(3, 3, 1)[-spatial_dims:],\n stride=1,\n padding=(1, 1, 0)[-spatial_dims:],\n bias=False,\n ),\n )\n if upsample_mode == \"transpose\":\n conv_trans_type = Conv[Conv.CONVTRANS, spatial_dims]\n self.add_module(\n \"up\", conv_trans_type(num_output_features, num_output_features, kernel_size=2, stride=2, bias=False)\n )\n else:\n align_corners: Optional[bool] = None\n if upsample_mode in [\"trilinear\", \"bilinear\"]:\n align_corners = True\n self.add_module(\"up\", nn.Upsample(scale_factor=2, mode=upsample_mode, align_corners=align_corners))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Pseudo3DLayer_Pseudo3DLayer.__init__.self.dropout_prob.dropout_prob": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Pseudo3DLayer_Pseudo3DLayer.__init__.self.dropout_prob.dropout_prob", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 171, "end_line": 209, "span_ids": 
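Illustrative sketch (not part of the indexed source): in ``transpose`` mode, UpTransition projects to `num_output_features` with the 1x1 convolution and doubles every spatial dimension with the stride-2 transposed convolution.

.. code-block:: python

    import torch
    from monai.networks.nets.ahnet import UpTransition

    up = UpTransition(spatial_dims=3, num_input_features=64, num_output_features=32, upsample_mode="transpose")
    x = torch.randn(1, 64, 8, 8, 8)
    print(up(x).shape)  # expected: torch.Size([1, 32, 16, 16, 16])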
["Pseudo3DLayer.__init__", "Pseudo3DLayer"], "tokens": 419}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Pseudo3DLayer(nn.Module):\n def __init__(self, spatial_dims: int, num_input_features: int, growth_rate: int, bn_size: int, dropout_prob: float):\n super(Pseudo3DLayer, self).__init__()\n # 1x1x1\n\n conv_type = Conv[Conv.CONV, spatial_dims]\n norm_type: Type[Union[nn.BatchNorm2d, nn.BatchNorm3d]] = Norm[Norm.BATCH, spatial_dims]\n relu_type: Type[nn.ReLU] = Act[Act.RELU]\n\n self.bn1 = norm_type(num_input_features)\n self.relu1 = relu_type(inplace=True)\n self.conv1 = conv_type(num_input_features, bn_size * growth_rate, kernel_size=1, stride=1, bias=False)\n # 3x3x1\n self.bn2 = norm_type(bn_size * growth_rate)\n self.relu2 = relu_type(inplace=True)\n self.conv2 = conv_type(\n bn_size * growth_rate,\n growth_rate,\n kernel_size=(3, 3, 1)[-spatial_dims:],\n stride=1,\n padding=(1, 1, 0)[-spatial_dims:],\n bias=False,\n )\n # 1x1x3\n self.bn3 = norm_type(growth_rate)\n self.relu3 = relu_type(inplace=True)\n self.conv3 = conv_type(\n growth_rate,\n growth_rate,\n kernel_size=(1, 1, 3)[-spatial_dims:],\n stride=1,\n padding=(0, 0, 1)[-spatial_dims:],\n bias=False,\n )\n # 1x1x1\n self.bn4 = norm_type(growth_rate)\n self.relu4 = relu_type(inplace=True)\n self.conv4 = conv_type(growth_rate, growth_rate, kernel_size=1, stride=1, bias=False)\n self.dropout_prob = dropout_prob", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Pseudo3DLayer.forward_Pseudo3DLayer.forward.return.torch_cat_inx_new_featu": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_Pseudo3DLayer.forward_Pseudo3DLayer.forward.return.torch_cat_inx_new_featu", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 217, "end_line": 240, "span_ids": ["Pseudo3DLayer.forward"], "tokens": 220}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Pseudo3DLayer(nn.Module):\n\n def forward(self, x):\n inx = x\n x = self.bn1(x)\n x = self.relu1(x)\n x = self.conv1(x)\n\n x = self.bn2(x)\n x = self.relu2(x)\n x3x3x1 = self.conv2(x)\n\n x = self.bn3(x3x3x1)\n x = self.relu3(x)\n x1x1x3 = self.conv3(x)\n\n x = x3x3x1 + x1x1x3\n x = self.bn4(x)\n x = self.relu4(x)\n new_features = self.conv4(x)\n\n self.dropout_prob = 0.0 # Dropout will make trouble!\n # since we use the train mode for inference\n if self.dropout_prob > 0.0:\n new_features = F.dropout(new_features, p=self.dropout_prob, training=self.training)\n return torch.cat([inx, new_features], 1)", "start_char_idx": null, "end_char_idx": 
null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_PSP.forward_PSP.forward.return.x": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_PSP.forward_PSP.forward.return.x", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 285, "end_line": 307, "span_ids": ["PSP.forward"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PSP(nn.Module):\n\n def forward(self, x):\n outputs = []\n if self.upsample_mode == \"transpose\":\n for (project_module, pool_module, up_module) in zip(\n self.project_modules, self.pool_modules, self.up_modules\n ):\n output = up_module(project_module(pool_module(x)))\n outputs.append(output)\n else:\n for (project_module, pool_module) in zip(self.project_modules, self.pool_modules):\n interpolate_size = x.shape[2:]\n align_corners: Optional[bool] = None\n if self.upsample_mode in [\"trilinear\", \"bilinear\"]:\n align_corners = True\n output = F.interpolate(\n project_module(pool_module(x)),\n size=interpolate_size,\n mode=self.upsample_mode,\n align_corners=align_corners,\n )\n outputs.append(output)\n x = torch.cat(outputs, dim=1)\n return x", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet_AHNet._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet_AHNet._", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 310, "end_line": 342, "span_ids": ["AHNet"], "tokens": 560}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AHNet(nn.Module):\n \"\"\"\n AHNet based on `Anisotropic Hybrid Network `_.\n Adapted from `lsqshr's official code `_.\n Except from the original network that supports 3D inputs, this implementation also supports 2D inputs.\n According to the `tests for deconvolutions `_, using\n ``\"transpose\"`` rather than linear interpolations is faster. Therefore, this implementation sets ``\"transpose\"``\n as the default upsampling method.\n\n To meet to requirements of the structure, for ``transpose`` mode, the input size of the first ``dim-1`` dimensions should\n be divisible by 2 ** (psp_block_num + 3) and no less than 32. 
For other modes, the input size of the first\n ``dim-1`` dimensions should be divisible by 32 and no less than 2 ** (psp_block_num + 3). In addition, at least one\n dimension should have a no less than 64 size.\n\n Args:\n layers: number of residual blocks for 4 layers of the network (layer1...layer4). Defaults to ``(3, 4, 6, 3)``.\n spatial_dims: spatial dimension of the input data. Defaults to 3.\n in_channels: number of input channels for the network. Default to 1.\n out_channels: number of output channels for the network. Defaults to 1.\n psp_block_num: the number of pyramid volumetric pooling modules used at the end of the network before the final\n output layer for extracting multiscale features. The number should be an integer that belongs to [0,4]. Defaults\n to 4.\n upsample_mode: [``\"transpose\"``, ``\"bilinear\"``, ``\"trilinear\"``, ``nearest``]\n The mode of upsampling manipulations.\n Using the last two modes cannot guarantee the model's reproducibility. Defaults to ``transpose``.\n\n - ``\"transpose\"``, uses transposed convolution layers.\n - ``\"bilinear\"``, uses bilinear interpolate.\n - ``\"trilinear\"``, uses trilinear interpolate.\n - ``\"nearest\"``, uses nearest interpolate.\n pretrained: whether to load pretrained weights from ResNet50 to initialize convolution layers, default to False.\n progress: If True, displays a progress bar of the download of pretrained weights to stderr.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet._make_layer_AHNet._make_layer.return.nn_Sequential_layers_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet._make_layer_AHNet._make_layer.return.nn_Sequential_layers_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 470, "end_line": 500, "span_ids": ["AHNet._make_layer"], "tokens": 252}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AHNet(nn.Module):\n\n def _make_layer(\n self,\n block: Type[Bottleneck3x3x1],\n planes: int,\n blocks: int,\n stride: int = 1,\n ) -> nn.Sequential:\n downsample = None\n if stride != 1 or self.inplanes != planes * block.expansion:\n downsample = nn.Sequential(\n self.conv_type(\n self.inplanes,\n planes * block.expansion,\n kernel_size=1,\n stride=(stride, stride, 1)[: self.spatial_dims],\n bias=False,\n ),\n self.pool_type(\n kernel_size=(1, 1, stride)[: self.spatial_dims], stride=(1, 1, stride)[: self.spatial_dims]\n ),\n self.norm_type(planes * block.expansion),\n )\n\n layers = []\n layers.append(\n block(self.spatial_dims, self.inplanes, planes, (stride, stride, 1)[: self.spatial_dims], downsample)\n )\n self.inplanes = planes * block.expansion\n for _ in range(1, blocks):\n layers.append(block(self.spatial_dims, self.inplanes, planes))\n return nn.Sequential(*layers)", "start_char_idx": null, 
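Worked example (not part of the indexed source) of the divisibility constraint quoted above for ``transpose`` mode.

.. code-block:: python

    psp_block_num = 4
    print(2 ** (psp_block_num + 3))  # 128: required divisor of the first dim-1 spatial sizes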
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet.forward_AHNet.forward.return.self_final_x_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet.forward_AHNet.forward.return.self_final_x_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 485, "end_line": 518, "span_ids": ["AHNet.forward"], "tokens": 275}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AHNet(nn.Module):\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.pool1(x)\n x = self.bn0(x)\n x = self.relu(x)\n conv_x = x\n x = self.maxpool(x)\n pool_x = x\n\n fm1 = self.layer1(x)\n fm2 = self.layer2(fm1)\n fm3 = self.layer3(fm2)\n fm4 = self.layer4(fm3)\n\n sum0 = self.up0(fm4) + fm3\n d0 = self.dense0(sum0)\n\n sum1 = self.up1(d0) + fm2\n d1 = self.dense1(sum1)\n\n sum2 = self.up2(d1) + fm1\n d2 = self.dense2(sum2)\n\n sum3 = self.trans1(d2) + pool_x\n d3 = self.dense3(sum3)\n\n sum4 = self.up3(d3) + conv_x\n d4 = self.dense4(sum4)\n if self.psp_block_num > 0:\n psp = self.psp(d4)\n x = torch.cat((psp, d4), dim=1)\n else:\n x = d4\n return self.final(x)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/highresnet.py_from_typing_import_Dict__DEFAULT_LAYER_PARAMS_3D._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/highresnet.py_from_typing_import_Dict__DEFAULT_LAYER_PARAMS_3D._", "embedding": null, "metadata": {"file_path": "monai/networks/nets/highresnet.py", "file_name": "highresnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 31, "span_ids": ["docstring"], "tokens": 239}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Dict, Optional, Sequence, Tuple, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom monai.networks.blocks import ADN, Convolution\nfrom monai.networks.layers.simplelayers import ChannelPad\nfrom monai.utils import ChannelMatching\n\nDEFAULT_LAYER_PARAMS_3D = (\n # initial conv layer\n {\"name\": \"conv_0\", \"n_features\": 16, \"kernel_size\": 3},\n # residual blocks\n {\"name\": \"res_1\", \"n_features\": 16, \"kernels\": (3, 3), \"repeat\": 3},\n {\"name\": \"res_2\", \"n_features\": 32, \"kernels\": (3, 3), \"repeat\": 3},\n {\"name\": \"res_3\", 
\"n_features\": 64, \"kernels\": (3, 3), \"repeat\": 3},\n # final conv layers\n {\"name\": \"conv_1\", \"n_features\": 80, \"kernel_size\": 1},\n {\"name\": \"conv_2\", \"kernel_size\": 1},\n)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/unet.py_from_typing_import_Sequen_UNet.__init__.self.model._create_block_in_channels": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/unet.py_from_typing_import_Sequen_UNet.__init__.self.model._create_block_in_channels", "embedding": null, "metadata": {"file_path": "monai/networks/nets/unet.py", "file_name": "unet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 107, "span_ids": ["UNet", "UNet.__init__", "docstring"], "tokens": 852}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Sequence, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom monai.networks.blocks.convolutions import Convolution, ResidualUnit\nfrom monai.networks.layers.factories import Act, Norm\nfrom monai.networks.layers.simplelayers import SkipConnection\nfrom monai.utils import alias, export\n\n__all__ = [\"UNet\", \"Unet\", \"unet\"]\n\n\n@export(\"monai.networks.nets\")\n@alias(\"Unet\")\nclass UNet(nn.Module):\n def __init__(\n self,\n dimensions: int,\n in_channels: int,\n out_channels: int,\n channels: Sequence[int],\n strides: Sequence[int],\n kernel_size: Union[Sequence[int], int] = 3,\n up_kernel_size: Union[Sequence[int], int] = 3,\n num_res_units: int = 0,\n act=Act.PRELU,\n norm=Norm.INSTANCE,\n dropout=0.0,\n ) -> None:\n \"\"\"\n Enhanced version of UNet which has residual units implemented with the ResidualUnit class.\n The residual part uses a convolution to change the input dimensions to match the output dimensions\n if this is necessary but will use nn.Identity if not.\n Refer to: https://link.springer.com/chapter/10.1007/978-3-030-12029-0_40.\n\n Args:\n dimensions: number of spatial dimensions.\n in_channels: number of input channels.\n out_channels: number of output channels.\n channels: sequence of channels. Top block first.\n strides: convolution stride.\n kernel_size: convolution kernel size. Defaults to 3.\n up_kernel_size: upsampling convolution kernel size. Defaults to 3.\n num_res_units: number of residual units. Defaults to 0.\n act: activation type and arguments. Defaults to PReLU.\n norm: feature normalization type and arguments. Defaults to instance norm.\n dropout: dropout ratio. 
Defaults to no dropout.\n \"\"\"\n super().__init__()\n\n self.dimensions = dimensions\n self.in_channels = in_channels\n self.out_channels = out_channels\n self.channels = channels\n self.strides = strides\n self.kernel_size = kernel_size\n self.up_kernel_size = up_kernel_size\n self.num_res_units = num_res_units\n self.act = act\n self.norm = norm\n self.dropout = dropout\n\n def _create_block(\n inc: int, outc: int, channels: Sequence[int], strides: Sequence[int], is_top: bool\n ) -> nn.Sequential:\n \"\"\"\n Builds the UNet structure from the bottom up by recursing down to the bottom block, then creating sequential\n blocks containing the downsample path, a skip connection around the previous block, and the upsample path.\n\n Args:\n inc: number of input channels.\n outc: number of output channels.\n channels: sequence of channels. Top block first.\n strides: convolution stride.\n is_top: True if this is the top block.\n \"\"\"\n c = channels[0]\n s = strides[0]\n\n subblock: nn.Module\n\n if len(channels) > 2:\n subblock = _create_block(c, c, channels[1:], strides[1:], False) # continue recursion down\n upc = c * 2\n else:\n # the next layer is the bottom so stop recursion, create the bottom layer as the sublock for this layer\n subblock = self._get_bottom_layer(c, channels[1])\n upc = c + channels[1]\n\n down = self._get_down_layer(inc, c, s, is_top) # create layer in downsampling path\n up = self._get_up_layer(upc, outc, s, is_top) # create layer in upsampling path\n\n return nn.Sequential(down, SkipConnection(subblock), up)\n\n self.model = _create_block(in_channels, out_channels, self.channels, self.strides, True)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_from_typing_import_Dict__get_acti_layer.return.act_type_act_args_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_from_typing_import_Dict__get_acti_layer.return.act_type_act_args_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/vnet.py", "file_name": "vnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 26, "span_ids": ["get_acti_layer", "docstring"], "tokens": 129}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Dict, Optional, Tuple, Type, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom monai.networks.blocks.convolutions import Convolution\nfrom monai.networks.layers.factories import Act, Conv, Dropout, Norm, split_args\n\n\ndef get_acti_layer(act: Union[Tuple[str, Dict], str], nchan: int = 0):\n if act == \"prelu\":\n act = (\"prelu\", {\"num_parameters\": nchan})\n act_name, act_args = split_args(act)\n act_type = Act[act_name]\n return act_type(**act_args)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
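As a brief illustration of the UNet constructor excerpted above, the following sketch builds a small 3D residual UNet and runs a dummy forward pass; the channel, stride, and input-size values are illustrative assumptions, not taken from this snapshot, and a MONAI build matching the signature shown is assumed.

    import torch
    from monai.networks.nets import UNet

    # illustrative configuration: four encoder levels, two residual units per block
    net = UNet(
        dimensions=3,
        in_channels=1,
        out_channels=2,
        channels=(16, 32, 64, 128),
        strides=(2, 2, 2),
        num_res_units=2,
    )
    x = torch.rand(1, 1, 64, 64, 64)  # (batch, channel, D, H, W); 64 is divisible by the product of strides
    y = net(x)
    print(y.shape)  # torch.Size([1, 2, 64, 64, 64]): same spatial size, 2 output channels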
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_LUConv__make_nconv.return.nn_Sequential_layers_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_LUConv__make_nconv.return.nn_Sequential_layers_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/vnet.py", "file_name": "vnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 29, "end_line": 53, "span_ids": ["LUConv.__init__", "LUConv.forward", "LUConv", "_make_nconv"], "tokens": 195}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LUConv(nn.Module):\n def __init__(self, spatial_dims: int, nchan: int, act: Union[Tuple[str, Dict], str]):\n super(LUConv, self).__init__()\n\n self.act_function = get_acti_layer(act, nchan)\n self.conv_block = Convolution(\n dimensions=spatial_dims,\n in_channels=nchan,\n out_channels=nchan,\n kernel_size=5,\n act=None,\n norm=Norm.BATCH,\n )\n\n def forward(self, x):\n out = self.conv_block(x)\n out = self.act_function(out)\n return out\n\n\ndef _make_nconv(spatial_dims: int, nchan: int, depth: int, act: Union[Tuple[str, Dict], str]):\n layers = []\n for _ in range(depth):\n layers.append(LUConv(spatial_dims, nchan, act))\n return nn.Sequential(*layers)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_InputTransition_InputTransition.forward.return.out": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_InputTransition_InputTransition.forward.return.out", "embedding": null, "metadata": {"file_path": "monai/networks/nets/vnet.py", "file_name": "vnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 56, "end_line": 80, "span_ids": ["InputTransition.forward", "InputTransition.__init__", "InputTransition"], "tokens": 226}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class InputTransition(nn.Module):\n def __init__(self, spatial_dims: int, in_channels: int, out_channels: int, act: Union[Tuple[str, Dict], str]):\n super(InputTransition, self).__init__()\n\n if 16 % in_channels != 0:\n raise ValueError(f\"16 should be divisible by in_channels, got in_channels={in_channels}.\")\n\n self.spatial_dims = spatial_dims\n self.in_channels = in_channels\n self.act_function = get_acti_layer(act, 16)\n self.conv_block = Convolution(\n dimensions=spatial_dims,\n in_channels=in_channels,\n out_channels=16,\n kernel_size=5,\n act=None,\n norm=Norm.BATCH,\n )\n\n def forward(self, x):\n out = self.conv_block(x)\n repeat_num = 16 // self.in_channels\n x16 = x.repeat([1, 
repeat_num, 1, 1, 1][: self.spatial_dims + 2])\n out = self.act_function(torch.add(out, x16))\n return out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_DownTransition_DownTransition.forward.return.out": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_DownTransition_DownTransition.forward.return.out", "embedding": null, "metadata": {"file_path": "monai/networks/nets/vnet.py", "file_name": "vnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 73, "end_line": 105, "span_ids": ["DownTransition.__init__", "DownTransition", "DownTransition.forward"], "tokens": 346}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DownTransition(nn.Module):\n def __init__(\n self,\n spatial_dims: int,\n in_channels: int,\n nconvs: int,\n act: Union[Tuple[str, Dict], str],\n dropout_prob: Optional[float] = None,\n dropout_dim: int = 3,\n ):\n super(DownTransition, self).__init__()\n\n conv_type: Type[Union[nn.Conv2d, nn.Conv3d]] = Conv[Conv.CONV, spatial_dims]\n norm_type: Type[Union[nn.BatchNorm2d, nn.BatchNorm3d]] = Norm[Norm.BATCH, spatial_dims]\n dropout_type: Type[Union[nn.Dropout, nn.Dropout2d, nn.Dropout3d]] = Dropout[Dropout.DROPOUT, dropout_dim]\n\n out_channels = 2 * in_channels\n self.down_conv = conv_type(in_channels, out_channels, kernel_size=2, stride=2)\n self.bn1 = norm_type(out_channels)\n self.act_function1 = get_acti_layer(act, out_channels)\n self.act_function2 = get_acti_layer(act, out_channels)\n self.ops = _make_nconv(spatial_dims, out_channels, nconvs, act)\n self.dropout = dropout_type(dropout_prob) if dropout_prob is not None else None\n\n def forward(self, x):\n down = self.act_function1(self.bn1(self.down_conv(x)))\n if self.dropout is not None:\n out = self.dropout(down)\n else:\n out = down\n out = self.ops(out)\n out = self.act_function2(torch.add(out, down))\n return out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_UpTransition_UpTransition.forward.return.out": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_UpTransition_UpTransition.forward.return.out", "embedding": null, "metadata": {"file_path": "monai/networks/nets/vnet.py", "file_name": "vnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 108, "end_line": 143, "span_ids": ["UpTransition.__init__", "UpTransition", "UpTransition.forward"], "tokens": 400}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], 
"excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class UpTransition(nn.Module):\n def __init__(\n self,\n spatial_dims: int,\n in_channels: int,\n out_channels: int,\n nconvs: int,\n act: Union[Tuple[str, Dict], str],\n dropout_prob: Optional[float] = None,\n dropout_dim: int = 3,\n ):\n super(UpTransition, self).__init__()\n\n conv_trans_type: Type[Union[nn.ConvTranspose2d, nn.ConvTranspose3d]] = Conv[Conv.CONVTRANS, spatial_dims]\n norm_type: Type[Union[nn.BatchNorm2d, nn.BatchNorm3d]] = Norm[Norm.BATCH, spatial_dims]\n dropout_type: Type[Union[nn.Dropout, nn.Dropout2d, nn.Dropout3d]] = Dropout[Dropout.DROPOUT, dropout_dim]\n\n self.up_conv = conv_trans_type(in_channels, out_channels // 2, kernel_size=2, stride=2)\n self.bn1 = norm_type(out_channels // 2)\n self.dropout = dropout_type(dropout_prob) if dropout_prob is not None else None\n self.dropout2 = dropout_type(0.5)\n self.act_function1 = get_acti_layer(act, out_channels // 2)\n self.act_function2 = get_acti_layer(act, out_channels)\n self.ops = _make_nconv(spatial_dims, out_channels, nconvs, act)\n\n def forward(self, x, skipx):\n if self.dropout is not None:\n out = self.dropout(x)\n else:\n out = x\n skipxdo = self.dropout2(skipx)\n out = self.act_function1(self.bn1(self.up_conv(out)))\n xcat = torch.cat((out, skipxdo), 1)\n out = self.ops(xcat)\n out = self.act_function2(torch.add(out, xcat))\n return out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_OutputTransition_OutputTransition.forward.return.out": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_OutputTransition_OutputTransition.forward.return.out", "embedding": null, "metadata": {"file_path": "monai/networks/nets/vnet.py", "file_name": "vnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 146, "end_line": 168, "span_ids": ["OutputTransition.__init__", "OutputTransition.forward", "OutputTransition"], "tokens": 202}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class OutputTransition(nn.Module):\n def __init__(self, spatial_dims: int, in_channels: int, out_channels: int, act: Union[Tuple[str, Dict], str]):\n super(OutputTransition, self).__init__()\n\n conv_type: Type[Union[nn.Conv2d, nn.Conv3d]] = Conv[Conv.CONV, spatial_dims]\n\n self.act_function1 = get_acti_layer(act, out_channels)\n self.conv_block = Convolution(\n dimensions=spatial_dims,\n in_channels=in_channels,\n out_channels=out_channels,\n kernel_size=5,\n act=None,\n norm=Norm.BATCH,\n )\n self.conv2 = conv_type(out_channels, out_channels, kernel_size=1)\n\n def forward(self, x):\n # convolve 32 down to 2 channels\n out = self.conv_block(x)\n out = self.act_function1(out)\n out = self.conv2(out)\n return out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": 
"{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_VNet_VNet.__init__.self.out_tr.OutputTransition_spatial_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_VNet_VNet.__init__.self.out_tr.OutputTransition_spatial_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/vnet.py", "file_name": "vnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 181, "end_line": 227, "span_ids": ["VNet", "VNet.__init__"], "tokens": 662}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class VNet(nn.Module):\n \"\"\"\n V-Net based on `Fully Convolutional Neural Networks for Volumetric Medical Image Segmentation\n `_.\n Adapted from `the official Caffe implementation\n `_. and `another pytorch implementation\n `_.\n The model supports 2D or 3D inputs.\n\n Args:\n spatial_dims: spatial dimension of the input data. Defaults to 3.\n in_channels: number of input channels for the network. Defaults to 1.\n The value should meet the condition that ``16 % in_channels == 0``.\n out_channels: number of output channels for the network. Defaults to 1.\n act: activation type in the network. Defaults to ``(\"elu\", {\"inplace\": True})``.\n dropout_prob: dropout ratio. Defaults to 0.5. Defaults to 3.\n dropout_dim: determine the dimensions of dropout. 
Defaults to 3.\n\n - ``dropout_dim = 1``, randomly zeroes some of the elements for each channel.\n - ``dropout_dim = 2``, Randomly zeroes out entire channels (a channel is a 2D feature map).\n - ``dropout_dim = 3``, Randomly zeroes out entire channels (a channel is a 3D feature map).\n \"\"\"\n\n def __init__(\n self,\n spatial_dims: int = 3,\n in_channels: int = 1,\n out_channels: int = 1,\n act: Union[Tuple[str, Dict], str] = (\"elu\", {\"inplace\": True}),\n dropout_prob: float = 0.5,\n dropout_dim: int = 3,\n ):\n super().__init__()\n\n if spatial_dims not in (2, 3):\n raise AssertionError(\"spatial_dims can only be 2 or 3.\")\n\n self.in_tr = InputTransition(spatial_dims, in_channels, 16, act)\n self.down_tr32 = DownTransition(spatial_dims, 16, 1, act)\n self.down_tr64 = DownTransition(spatial_dims, 32, 2, act)\n self.down_tr128 = DownTransition(spatial_dims, 64, 3, act, dropout_prob=dropout_prob)\n self.down_tr256 = DownTransition(spatial_dims, 128, 2, act, dropout_prob=dropout_prob)\n self.up_tr256 = UpTransition(spatial_dims, 256, 256, 2, act, dropout_prob=dropout_prob)\n self.up_tr128 = UpTransition(spatial_dims, 256, 128, 2, act, dropout_prob=dropout_prob)\n self.up_tr64 = UpTransition(spatial_dims, 128, 64, 1, act)\n self.up_tr32 = UpTransition(spatial_dims, 64, 32, 1, act)\n self.out_tr = OutputTransition(spatial_dims, 32, out_channels, act)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_VNet.forward_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/vnet.py_VNet.forward_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/vnet.py", "file_name": "vnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 217, "end_line": 229, "span_ids": ["VNet.forward"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class VNet(nn.Module):\n\n def forward(self, x):\n out16 = self.in_tr(x)\n out32 = self.down_tr32(out16)\n out64 = self.down_tr64(out32)\n out128 = self.down_tr128(out64)\n out256 = self.down_tr256(out128)\n x = self.up_tr256(out256, out128)\n x = self.up_tr128(x, out64)\n x = self.up_tr64(x, out32)\n x = self.up_tr32(x, out16)\n x = self.out_tr(x)\n return x", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_normal_init_normal_init.if_getattr_m_weight_N.elif_cname_find_BatchNor.nn_init_constant__m_bias_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_normal_init_normal_init.if_getattr_m_weight_N.elif_cname_find_BatchNor.nn_init_constant__m_bias_", "embedding": null, "metadata": {"file_path": "monai/networks/utils.py", "file_name": 
"utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 152, "end_line": 170, "span_ids": ["normal_init"], "tokens": 266}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def normal_init(\n m, std: float = 0.02, normal_func: Callable[[torch.Tensor, float, float], Any] = torch.nn.init.normal_\n) -> None:\n \"\"\"\n Initialize the weight and bias tensors of `m' and its submodules to values from a normal distribution with a\n stddev of `std'. Weight tensors of convolution and linear modules are initialized with a mean of 0, batch\n norm modules with a mean of 1. The callable `normal_func', used to assign values, should have the same arguments\n as its default normal_(). This can be used with `nn.Module.apply` to visit submodules of a network.\n \"\"\"\n cname = m.__class__.__name__\n\n if getattr(m, \"weight\", None) is not None and (cname.find(\"Conv\") != -1 or cname.find(\"Linear\") != -1):\n normal_func(m.weight.data, 0.0, std)\n if getattr(m, \"bias\", None) is not None:\n nn.init.constant_(m.bias.data, 0.0)\n\n elif cname.find(\"BatchNorm\") != -1:\n normal_func(m.weight.data, 1.0, std)\n nn.init.constant_(m.bias.data, 0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_icnr_init_icnr_init.conv_weight_data_copy__ke": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/utils.py_icnr_init_icnr_init.conv_weight_data_copy__ke", "embedding": null, "metadata": {"file_path": "monai/networks/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 173, "end_line": 190, "span_ids": ["icnr_init"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def icnr_init(conv, upsample_factor, init=nn.init.kaiming_normal_):\n \"\"\"\n ICNR initialization for 2D/3D kernels adapted from Aitken et al.,2017 , \"Checkerboard artifact free\n sub-pixel convolution\".\n \"\"\"\n out_channels, in_channels, *dims = conv.weight.shape\n scale_factor = upsample_factor ** len(dims)\n\n oc2 = int(out_channels / scale_factor)\n\n kernel = torch.zeros([oc2, in_channels] + dims)\n kernel = init(kernel)\n kernel = kernel.transpose(0, 1)\n kernel = kernel.reshape(oc2, in_channels, -1)\n kernel = kernel.repeat(1, 1, scale_factor)\n kernel = kernel.reshape([in_channels, out_channels] + dims)\n kernel = kernel.transpose(0, 1)\n conv.weight.data.copy_(kernel)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/adaptors.py_from_typing_import_Callab_adaptor.map_only_names.return._v_ditems_k_for_k_v_in": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/adaptors.py_from_typing_import_Callab_adaptor.map_only_names.return._v_ditems_k_for_k_v_in", "embedding": null, "metadata": {"file_path": "monai/transforms/adaptors.py", "file_name": "adaptors.py", "file_type": "text/x-python", "category": "implementation", "start_line": 125, "end_line": 147, "span_ids": ["adaptor", "imports"], "tokens": 218}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Callable\n\nfrom monai.utils import export as _monai_export\n\n__all__ = [\"adaptor\", \"apply_alias\", \"to_kwargs\", \"FunctionSignature\"]\n\n\n@_monai_export(\"monai.transforms\")\ndef adaptor(function, outputs, inputs=None):\n def must_be_types_or_none(variable_name, variable, types):\n if variable is not None:\n if not isinstance(variable, types):\n raise TypeError(f\"'{variable_name}' must be None or one of {types} but is {type(variable)}\")\n\n def must_be_types(variable_name, variable, types):\n if not isinstance(variable, types):\n raise TypeError(f\"'{variable_name}' must be one of {types} but is {type(variable)}\")\n\n def map_names(ditems, input_map):\n return {input_map(k, k): v for k, v in ditems.items()}\n\n def map_only_names(ditems, input_map):\n return {v: ditems[k] for k, v in input_map.items()}\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandSpatialCropd.randomize_RandSpatialCropd.randomize.if_self_random_center_.self._slices._slice_None_get_rand": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandSpatialCropd.randomize_RandSpatialCropd.randomize.if_self_random_center_.self._slices._slice_None_get_rand", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 433, "end_line": 439, "span_ids": ["RandSpatialCropd.randomize"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandSpatialCropd(Randomizable, MapTransform, InvertibleTransform):\n\n def randomize(self, img_size: Sequence[int]) -> None:\n self._size = fall_back_tuple(self.roi_size, img_size)\n if self.random_size:\n self._size = [self.R.randint(low=self._size[i], high=img_size[i] + 1) for i in range(len(img_size))]\n if self.random_center:\n valid_size = get_valid_patch_size(img_size, self._size)\n self._slices = (slice(None),) + get_random_patch(img_size, valid_size, self.R)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandSpatialCropd.__call___RandSpatialCropd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandSpatialCropd.__call___RandSpatialCropd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 441, "end_line": 454, "span_ids": ["RandSpatialCropd.__call__"], "tokens": 184}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandSpatialCropd(Randomizable, MapTransform, InvertibleTransform):\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n self.randomize(d[self.keys[0]].shape[1:]) # image shape from the first data key\n if self._size is None:\n raise AssertionError\n for key in self.key_iterator(d):\n if self.random_center:\n self.push_transform(d, key, {\"slices\": [(i.start, i.stop) for i in self._slices[1:]]}) # type: ignore\n d[key] = d[key][self._slices]\n else:\n 
self.push_transform(d, key)\n cropper = CenterSpatialCrop(self._size)\n d[key] = cropper(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensity_ScaleIntensity.__init__.self.factor.factor": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_ScaleIntensity_ScaleIntensity.__init__.self.factor.factor", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 243, "end_line": 261, "span_ids": ["ScaleIntensity", "ScaleIntensity.__init__"], "tokens": 196}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ScaleIntensity(Transform):\n \"\"\"\n Scale the intensity of input image to the given value range (minv, maxv).\n If `minv` and `maxv` not provided, use `factor` to scale image by ``v = v * (1 + factor)``.\n \"\"\"\n\n def __init__(\n self, minv: Optional[float] = 0.0, maxv: Optional[float] = 1.0, factor: Optional[float] = None\n ) -> None:\n \"\"\"\n Args:\n minv: minimum value of output data.\n maxv: maximum value of output data.\n factor: factor scale by ``v = v * (1 + factor)``. 
In order to use\n this parameter, please set `minv` and `maxv` into None.\n \"\"\"\n self.minv = minv\n self.maxv = maxv\n self.factor = factor", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_MaskIntensity_MaskIntensity.__init__.self.mask_data.mask_data": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_MaskIntensity_MaskIntensity.__init__.self.mask_data.mask_data", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 702, "end_line": 718, "span_ids": ["MaskIntensity.__init__", "MaskIntensity"], "tokens": 167}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MaskIntensity(Transform):\n \"\"\"\n Mask the intensity values of input image with the specified mask data.\n Mask data must have the same spatial size as the input image, and all\n the intensity values of input image corresponding to `0` in the mask\n data will be set to `0`, others will keep the original value.\n\n Args:\n mask_data: if `mask_data` is single channel, apply to every channel\n of input image. if multiple channels, the number of channels must\n match the input data. 
`mask_data` will be converted to `bool` values\n by `mask_data > 0` before applying transform to input image.\n\n \"\"\"\n\n def __init__(self, mask_data: Optional[np.ndarray]) -> None:\n self.mask_data = mask_data", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_GaussianSmooth_GaussianSmooth.__call__.return.gaussian_filter_input_dat": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_GaussianSmooth_GaussianSmooth.__call__.return.gaussian_filter_input_dat", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 828, "end_line": 849, "span_ids": ["GaussianSmooth.__call__", "GaussianSmooth", "GaussianSmooth.__init__"], "tokens": 250}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GaussianSmooth(Transform):\n \"\"\"\n Apply Gaussian smooth to the input data based on specified `sigma` parameter.\n A default value `sigma=1.0` is provided for reference.\n\n Args:\n sigma: if a list of values, must match the count of spatial dimensions of input data,\n and apply every value in the list to 1 spatial dimension. 
if only 1 value provided,\n use it for all spatial dimensions.\n approx: discrete Gaussian kernel type, available options are \"erf\", \"sampled\", and \"scalespace\".\n see also :py:meth:`monai.networks.layers.GaussianFilter`.\n\n \"\"\"\n\n def __init__(self, sigma: Union[Sequence[float], float] = 1.0, approx: str = \"erf\") -> None:\n self.sigma = sigma\n self.approx = approx\n\n def __call__(self, img: np.ndarray):\n gaussian_filter = GaussianFilter(img.ndim - 1, self.sigma, approx=self.approx)\n input_data = torch.as_tensor(np.ascontiguousarray(img), dtype=torch.float).unsqueeze(0)\n return gaussian_filter(input_data).squeeze(0).detach().numpy()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandGaussianSmooth_RandGaussianSmooth.__call__.return.GaussianSmooth_sigma_sigm": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandGaussianSmooth_RandGaussianSmooth.__call__.return.GaussianSmooth_sigma_sigm", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 852, "end_line": 895, "span_ids": ["RandGaussianSmooth.__call__", "RandGaussianSmooth.randomize", "RandGaussianSmooth", "RandGaussianSmooth.__init__"], "tokens": 449}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandGaussianSmooth(RandomizableTransform):\n \"\"\"\n Apply Gaussian smooth to the input data based on randomly selected `sigma` parameters.\n\n Args:\n sigma_x: randomly select sigma value for the first spatial dimension.\n sigma_y: randomly select sigma value for the second spatial dimension if have.\n sigma_z: randomly select sigma value for the third spatial dimension if have.\n prob: probability of Gaussian smooth.\n approx: discrete Gaussian kernel type, available options are \"erf\", \"sampled\", and \"scalespace\".\n see also :py:meth:`monai.networks.layers.GaussianFilter`.\n\n \"\"\"\n\n def __init__(\n self,\n sigma_x: Tuple[float, float] = (0.25, 1.5),\n sigma_y: Tuple[float, float] = (0.25, 1.5),\n sigma_z: Tuple[float, float] = (0.25, 1.5),\n prob: float = 0.1,\n approx: str = \"erf\",\n ) -> None:\n RandomizableTransform.__init__(self, prob)\n self.sigma_x = sigma_x\n self.sigma_y = sigma_y\n self.sigma_z = sigma_z\n self.approx = approx\n\n self.x = self.sigma_x[0]\n self.y = self.sigma_y[0]\n self.z = self.sigma_z[0]\n\n def randomize(self, data: Optional[Any] = None) -> None:\n super().randomize(None)\n self.x = self.R.uniform(low=self.sigma_x[0], high=self.sigma_x[1])\n self.y = self.R.uniform(low=self.sigma_y[0], high=self.sigma_y[1])\n self.z = self.R.uniform(low=self.sigma_z[0], high=self.sigma_z[1])\n\n def __call__(self, img: np.ndarray):\n self.randomize()\n if not self._do_transform:\n return img\n sigma = ensure_tuple_size(tup=(self.x, self.y, self.z), dim=img.ndim - 1)\n return GaussianSmooth(sigma=sigma, 
approx=self.approx)(img)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_GaussianSharpen.__call___GaussianSharpen.__call__.return._blurred_f_self_alpha_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_GaussianSharpen.__call___GaussianSharpen.__call__.return._blurred_f_self_alpha_", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 937, "end_line": 943, "span_ids": ["GaussianSharpen.__call__"], "tokens": 135}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GaussianSharpen(Transform):\n\n def __call__(self, img: np.ndarray):\n gaussian_filter1 = GaussianFilter(img.ndim - 1, self.sigma1, approx=self.approx)\n gaussian_filter2 = GaussianFilter(img.ndim - 1, self.sigma2, approx=self.approx)\n input_data = torch.as_tensor(np.ascontiguousarray(img), dtype=torch.float).unsqueeze(0)\n blurred_f = gaussian_filter1(input_data)\n filter_blurred_f = gaussian_filter2(blurred_f)\n return (blurred_f + self.alpha * (blurred_f - filter_blurred_f)).squeeze(0).detach().numpy()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandGaussianSharpen.randomize_RandGaussianSharpen.randomize.self.a.self_R_uniform_low_self_a": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandGaussianSharpen.randomize_RandGaussianSharpen.randomize.self.a.self_R_uniform_low_self_a", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 990, "end_line": 1001, "span_ids": ["RandGaussianSharpen.randomize"], "tokens": 290}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandGaussianSharpen(RandomizableTransform):\n\n def randomize(self, data: Optional[Any] = None) -> None:\n super().randomize(None)\n self.x1 = self.R.uniform(low=self.sigma1_x[0], high=self.sigma1_x[1])\n self.y1 = self.R.uniform(low=self.sigma1_y[0], high=self.sigma1_y[1])\n self.z1 = self.R.uniform(low=self.sigma1_z[0], high=self.sigma1_z[1])\n sigma2_x = (self.sigma2_x, self.x1) if not isinstance(self.sigma2_x, Iterable) else self.sigma2_x\n sigma2_y = (self.sigma2_y, self.y1) if 
not isinstance(self.sigma2_y, Iterable) else self.sigma2_y\n sigma2_z = (self.sigma2_z, self.z1) if not isinstance(self.sigma2_z, Iterable) else self.sigma2_z\n self.x2 = self.R.uniform(low=sigma2_x[0], high=sigma2_x[1])\n self.y2 = self.R.uniform(low=sigma2_y[0], high=sigma2_y[1])\n self.z2 = self.R.uniform(low=sigma2_z[0], high=sigma2_z[1])\n self.a = self.R.uniform(low=self.alpha[0], high=self.alpha[1])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_GaussianSmoothd_GaussianSmoothd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_GaussianSmoothd_GaussianSmoothd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 704, "end_line": 734, "span_ids": ["GaussianSmoothd", "GaussianSmoothd.__call__", "GaussianSmoothd.__init__"], "tokens": 286}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GaussianSmoothd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.GaussianSmooth`.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n sigma: if a list of values, must match the count of spatial dimensions of input data,\n and apply every value in the list to 1 spatial dimension. 
if only 1 value provided,\n use it for all spatial dimensions.\n approx: discrete Gaussian kernel type, available options are \"erf\", \"sampled\", and \"scalespace\".\n see also :py:meth:`monai.networks.layers.GaussianFilter`.\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n sigma: Union[Sequence[float], float],\n approx: str = \"erf\",\n allow_missing_keys: bool = False,\n ) -> None:\n super().__init__(keys, allow_missing_keys)\n self.converter = GaussianSmooth(sigma, approx=approx)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.converter(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianSmoothd_RandGaussianSmoothd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianSmoothd_RandGaussianSmoothd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 737, "end_line": 785, "span_ids": ["RandGaussianSmoothd.__init__", "RandGaussianSmoothd", "RandGaussianSmoothd.randomize", "RandGaussianSmoothd.__call__"], "tokens": 557}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandGaussianSmoothd(RandomizableTransform, MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.GaussianSmooth`.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n sigma_x: randomly select sigma value for the first spatial dimension.\n sigma_y: randomly select sigma value for the second spatial dimension if have.\n sigma_z: randomly select sigma value for the third spatial dimension if have.\n approx: discrete Gaussian kernel type, available options are \"erf\", \"sampled\", and \"scalespace\".\n see also :py:meth:`monai.networks.layers.GaussianFilter`.\n prob: probability of Gaussian smooth.\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n sigma_x: Tuple[float, float] = (0.25, 1.5),\n sigma_y: Tuple[float, float] = (0.25, 1.5),\n sigma_z: Tuple[float, float] = (0.25, 1.5),\n approx: str = \"erf\",\n prob: float = 0.1,\n allow_missing_keys: bool = False,\n ) -> None:\n MapTransform.__init__(self, keys, allow_missing_keys)\n RandomizableTransform.__init__(self, prob)\n self.sigma_x, self.sigma_y, self.sigma_z = sigma_x, sigma_y, sigma_z\n self.approx = approx\n\n self.x, self.y, self.z = self.sigma_x[0], self.sigma_y[0], self.sigma_z[0]\n\n def randomize(self, data: Optional[Any] = None) -> None:\n super().randomize(None)\n self.x = self.R.uniform(low=self.sigma_x[0], 
high=self.sigma_x[1])\n self.y = self.R.uniform(low=self.sigma_y[0], high=self.sigma_y[1])\n self.z = self.R.uniform(low=self.sigma_z[0], high=self.sigma_z[1])\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n self.randomize()\n if not self._do_transform:\n return d\n for key in self.key_iterator(d):\n sigma = ensure_tuple_size(tup=(self.x, self.y, self.z), dim=d[key].ndim - 1)\n d[key] = GaussianSmooth(sigma=sigma, approx=self.approx)(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_GaussianSharpend_GaussianSharpend.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_GaussianSharpend_GaussianSharpend.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 788, "end_line": 824, "span_ids": ["GaussianSharpend.__init__", "GaussianSharpend.__call__", "GaussianSharpend"], "tokens": 408}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GaussianSharpend(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.GaussianSharpen`.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n sigma1: sigma parameter for the first gaussian kernel. if a list of values, must match the count\n of spatial dimensions of input data, and apply every value in the list to 1 spatial dimension.\n if only 1 value provided, use it for all spatial dimensions.\n sigma2: sigma parameter for the second gaussian kernel. 
if a list of values, must match the count\n of spatial dimensions of input data, and apply every value in the list to 1 spatial dimension.\n if only 1 value provided, use it for all spatial dimensions.\n alpha: weight parameter to compute the final result.\n approx: discrete Gaussian kernel type, available options are \"erf\", \"sampled\", and \"scalespace\".\n see also :py:meth:`monai.networks.layers.GaussianFilter`.\n allow_missing_keys: don't raise exception if key is missing.\n\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n sigma1: Union[Sequence[float], float] = 3.0,\n sigma2: Union[Sequence[float], float] = 1.0,\n alpha: float = 30.0,\n approx: str = \"erf\",\n allow_missing_keys: bool = False,\n ) -> None:\n super().__init__(keys, allow_missing_keys)\n self.converter = GaussianSharpen(sigma1, sigma2, alpha, approx=approx)\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n for key in self.key_iterator(d):\n d[key] = self.converter(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianSharpend.randomize_RandGaussianSharpend.randomize.self.a.self_R_uniform_low_self_a": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianSharpend.randomize_RandGaussianSharpend.randomize.self.a.self_R_uniform_low_self_a", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 876, "end_line": 887, "span_ids": ["RandGaussianSharpend.randomize"], "tokens": 293}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandGaussianSharpend(RandomizableTransform, MapTransform):\n\n def randomize(self, data: Optional[Any] = None) -> None:\n super().randomize(None)\n self.x1 = self.R.uniform(low=self.sigma1_x[0], high=self.sigma1_x[1])\n self.y1 = self.R.uniform(low=self.sigma1_y[0], high=self.sigma1_y[1])\n self.z1 = self.R.uniform(low=self.sigma1_z[0], high=self.sigma1_z[1])\n sigma2_x = (self.sigma2_x, self.x1) if not isinstance(self.sigma2_x, Iterable) else self.sigma2_x\n sigma2_y = (self.sigma2_y, self.y1) if not isinstance(self.sigma2_y, Iterable) else self.sigma2_y\n sigma2_z = (self.sigma2_z, self.z1) if not isinstance(self.sigma2_z, Iterable) else self.sigma2_z\n self.x2 = self.R.uniform(low=sigma2_x[0], high=sigma2_x[1])\n self.y2 = self.R.uniform(low=sigma2_y[0], high=sigma2_y[1])\n self.z2 = self.R.uniform(low=sigma2_z[0], high=sigma2_z[1])\n self.a = self.R.uniform(low=self.alpha[0], high=self.alpha[1])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
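A short usage sketch for the dictionary-based smoothing and sharpening transforms above; the key name and array shape are illustrative assumptions, and the transforms are assumed to be exported from monai.transforms:

    import numpy as np
    from monai.transforms import GaussianSharpend, GaussianSmoothd

    # channel-first volume stored under an arbitrary "image" key
    data = {"image": np.random.rand(1, 32, 32, 32).astype(np.float32)}

    smooth = GaussianSmoothd(keys="image", sigma=1.0)
    sharpen = GaussianSharpend(keys="image", sigma1=3.0, sigma2=1.0, alpha=30.0)

    smoothed = smooth(data)    # blurred copy of the input dictionary
    sharpened = sharpen(data)  # unsharp-mask style enhancement of the original
    print(smoothed["image"].shape, sharpened["image"].shape)  # both (1, 32, 32, 32)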
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianSharpend.__call___RandGaussianSharpend.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandGaussianSharpend.__call___RandGaussianSharpend.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 889, "end_line": 898, "span_ids": ["RandGaussianSharpend.__call__"], "tokens": 153}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandGaussianSharpend(RandomizableTransform, MapTransform):\n\n def __call__(self, data):\n d = dict(data)\n self.randomize()\n if not self._do_transform:\n return d\n for key in self.key_iterator(d):\n sigma1 = ensure_tuple_size(tup=(self.x1, self.y1, self.z1), dim=d[key].ndim - 1)\n sigma2 = ensure_tuple_size(tup=(self.x2, self.y2, self.z2), dim=d[key].ndim - 1)\n d[key] = GaussianSharpen(sigma1=sigma1, sigma2=sigma2, alpha=self.a, approx=self.approx)(d[key])\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_MeanEnsemble.__call___MeanEnsemble.__call__.return.torch_mean_img__dim_0_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_MeanEnsemble.__call___MeanEnsemble.__call__.return.torch_mean_img__dim_0_", "embedding": null, "metadata": {"file_path": "monai/transforms/post/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 395, "end_line": 406, "span_ids": ["MeanEnsemble.__call__"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MeanEnsemble(Transform):\n\n def __call__(self, img: Union[Sequence[torch.Tensor], torch.Tensor]) -> torch.Tensor:\n img_ = torch.stack(img) if isinstance(img, (tuple, list)) else torch.as_tensor(img)\n if self.weights is not None:\n self.weights = self.weights.to(img_.device)\n shape = tuple(self.weights.shape)\n for _ in range(img_.ndimension() - self.weights.ndimension()):\n shape += (1,)\n weights = self.weights.reshape(*shape)\n\n img_ = img_ * weights / weights.mean(dim=0, keepdim=True)\n\n return torch.mean(img_, dim=0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_VoteEnsemble_VoteEnsemble.__init__.self.num_classes.num_classes": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/array.py_VoteEnsemble_VoteEnsemble.__init__.self.num_classes.num_classes", "embedding": null, "metadata": {"file_path": "monai/transforms/post/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 409, "end_line": 430, "span_ids": ["VoteEnsemble.__init__", "VoteEnsemble"], "tokens": 234}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class VoteEnsemble(Transform):\n \"\"\"\n Execute vote ensemble on the input data.\n The input data can be a list or tuple of PyTorch Tensor with shape: [B[, C, H, W, D]],\n Or a single PyTorch Tensor with shape: [E, B[, C, H, W, D]], the `E` dimension represents\n the output data from different models.\n Typically, the input data is model output of segmentation task or classification task.\n\n Note:\n This vote transform expects the input data is discrete values. It can be multiple channels\n data in One-Hot format or single channel data. It will vote to select the most common data\n between items.\n The output data has the same shape as every item of the input data.\n\n Args:\n num_classes: if the input is single channel data instead of One-Hot, we can't get class number\n from channel, need to explicitly specify the number of classes to vote.\n\n \"\"\"\n\n def __init__(self, num_classes: Optional[int] = None) -> None:\n self.num_classes = num_classes", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_Ensembled_Ensembled.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_Ensembled_Ensembled.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/post/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 223, "end_line": 267, "span_ids": ["Ensembled", "Ensembled.__call__", "Ensembled.__init__"], "tokens": 421}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Ensembled(MapTransform):\n \"\"\"\n Base class of dictionary-based ensemble transforms.\n\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n ensemble: Callable[[Union[Sequence[torch.Tensor], torch.Tensor]], torch.Tensor],\n output_key: Optional[str] = None,\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be stack 
and execute ensemble.\n if only 1 key provided, suppose it's a PyTorch Tensor with data stacked on dimension `E`.\n output_key: the key to store ensemble result in the dictionary.\n ensemble: callable method to execute ensemble on specified data.\n if only 1 key provided in `keys`, `output_key` can be None and use `keys` as default.\n allow_missing_keys: don't raise exception if key is missing.\n\n Raises:\n TypeError: When ``ensemble`` is not ``callable``.\n ValueError: When ``len(keys) > 1`` and ``output_key=None``. Incompatible values.\n\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n if not callable(ensemble):\n raise TypeError(f\"ensemble must be callable but is {type(ensemble).__name__}.\")\n self.ensemble = ensemble\n if len(self.keys) > 1 and output_key is None:\n raise ValueError(\"Incompatible values: len(self.keys) > 1 and output_key=None.\")\n self.output_key = output_key if output_key is not None else self.keys[0]\n\n def __call__(self, data: Mapping[Hashable, torch.Tensor]) -> Dict[Hashable, torch.Tensor]:\n d = dict(data)\n items: Union[List[torch.Tensor], torch.Tensor]\n if len(self.keys) == 1:\n items = d[self.keys[0]]\n else:\n items = [d[key] for key in self.key_iterator(d)]\n d[self.output_key] = self.ensemble(items)\n\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_MeanEnsembled_MeanEnsembled.__init__.super___init___keys_en": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/post/dictionary.py_MeanEnsembled_MeanEnsembled.__init__.super___init___keys_en", "embedding": null, "metadata": {"file_path": "monai/transforms/post/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 277, "end_line": 307, "span_ids": ["MeanEnsembled", "MeanEnsembled.__init__"], "tokens": 435}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MeanEnsembled(Ensembled):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.MeanEnsemble`.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n output_key: Optional[str] = None,\n weights: Optional[Union[Sequence[float], torch.Tensor, np.ndarray]] = None,\n ) -> None:\n \"\"\"\n Args:\n keys: keys of the corresponding items to be stack and execute ensemble.\n if only 1 key provided, suppose it's a PyTorch Tensor with data stacked on dimension `E`.\n output_key: the key to store ensemble result in the dictionary.\n if only 1 key provided in `keys`, `output_key` can be None and use `keys` as default.\n weights: can be a list or tuple of numbers for input data with shape: [E, B, C, H, W[, D]].\n or a Numpy ndarray or a PyTorch Tensor data.\n the `weights` will be added to input data from highest dimension, for example:\n 1. if the `weights` only has 1 dimension, it will be added to the `E` dimension of input data.\n 2. 
if the `weights` has 3 dimensions, it will be added to `E`, `B` and `C` dimensions.\n it's a typical practice to add weights for different classes:\n to ensemble 3 segmentation model outputs, every output has 4 channels(classes),\n so the input data shape can be: [3, B, 4, H, W, D].\n and add different `weights` for different classes, so the `weights` shape can be: [3, 1, 4].\n for example: `weights = [[[1, 2, 3, 4]], [[4, 3, 2, 1]], [[1, 1, 1, 1]]]`.\n\n \"\"\"\n ensemble = MeanEnsemble(weights=weights)\n super().__init__(keys, ensemble, output_key)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandRotate_RandRotate.__init__.self.z.0_0": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/array.py_RandRotate_RandRotate.__init__.self.z.0_0", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 655, "end_line": 714, "span_ids": ["RandRotate", "RandRotate.__init__"], "tokens": 744}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandRotate(RandomizableTransform):\n \"\"\"\n Randomly rotate the input arrays.\n\n Args:\n range_x: Range of rotation angle in radians in the plane defined by the first and second axes.\n If single number, angle is uniformly sampled from (-range_x, range_x).\n range_y: Range of rotation angle in radians in the plane defined by the first and third axes.\n If single number, angle is uniformly sampled from (-range_y, range_y).\n range_z: Range of rotation angle in radians in the plane defined by the second and third axes.\n If single number, angle is uniformly sampled from (-range_z, range_z).\n prob: Probability of rotation.\n keep_size: If it is False, the output shape is adapted so that the\n input array is contained completely in the output.\n If it is True, the output shape is the same as the input. Default is True.\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``\"bilinear\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. Defaults to ``\"border\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n align_corners: Defaults to False.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n dtype: data type for resampling computation. Defaults to ``np.float64`` for best precision.\n If None, use the data type of input data. 
To be compatible with other modules,\n the output data type is always ``np.float32``.\n \"\"\"\n\n def __init__(\n self,\n range_x: Union[Tuple[float, float], float] = 0.0,\n range_y: Union[Tuple[float, float], float] = 0.0,\n range_z: Union[Tuple[float, float], float] = 0.0,\n prob: float = 0.1,\n keep_size: bool = True,\n mode: Union[GridSampleMode, str] = GridSampleMode.BILINEAR,\n padding_mode: Union[GridSamplePadMode, str] = GridSamplePadMode.BORDER,\n align_corners: bool = False,\n dtype: DtypeLike = np.float64,\n ) -> None:\n RandomizableTransform.__init__(self, prob)\n self.range_x = ensure_tuple(range_x)\n if len(self.range_x) == 1:\n self.range_x = tuple(sorted([-self.range_x[0], self.range_x[0]]))\n self.range_y = ensure_tuple(range_y)\n if len(self.range_y) == 1:\n self.range_y = tuple(sorted([-self.range_y[0], self.range_y[0]]))\n self.range_z = ensure_tuple(range_z)\n if len(self.range_z) == 1:\n self.range_z = tuple(sorted([-self.range_z[0], self.range_z[0]]))\n\n self.keep_size = keep_size\n self.mode: GridSampleMode = GridSampleMode(mode)\n self.padding_mode: GridSamplePadMode = GridSamplePadMode(padding_mode)\n self.align_corners = align_corners\n self.dtype = dtype\n\n self.x = 0.0\n self.y = 0.0\n self.z = 0.0", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Spacingd_Spacingd._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Spacingd_Spacingd._", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 117, "end_line": 129, "span_ids": ["Spacingd"], "tokens": 128}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Spacingd(MapTransform, InvertibleTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.Spacing`.\n\n This transform assumes the ``data`` dictionary has a key for the input\n data's metadata and contains `affine` field. 
The key is formed by ``key_{meta_key_postfix}``.\n\n After resampling the input array, this transform will write the new affine\n to the `affine` field of metadata which is formed by ``key_{meta_key_postfix}``.\n\n see also:\n :py:class:`monai.transforms.Spacing`\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Spacingd.__init___Spacingd.__init__.self.meta_key_postfix.meta_key_postfix": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Spacingd.__init___Spacingd.__init__.self.meta_key_postfix.meta_key_postfix", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 131, "end_line": 191, "span_ids": ["Spacingd.__init__"], "tokens": 806}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Spacingd(MapTransform, InvertibleTransform):\n\n def __init__(\n self,\n keys: KeysCollection,\n pixdim: Sequence[float],\n diagonal: bool = False,\n mode: GridSampleModeSequence = GridSampleMode.BILINEAR,\n padding_mode: GridSamplePadModeSequence = GridSamplePadMode.BORDER,\n align_corners: Union[Sequence[bool], bool] = False,\n dtype: Optional[Union[Sequence[DtypeLike], DtypeLike]] = np.float64,\n meta_key_postfix: str = \"meta_dict\",\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n pixdim: output voxel spacing.\n diagonal: whether to resample the input to have a diagonal affine matrix.\n If True, the input data is resampled to the following affine::\n\n np.diag((pixdim_0, pixdim_1, pixdim_2, 1))\n\n This effectively resets the volume to the world coordinate system (RAS+ in nibabel).\n The original orientation, rotation, shearing are not preserved.\n\n If False, the axes orientation, orthogonal rotation and\n translations components from the original affine will be\n preserved in the target affine. This option will not flip/swap\n axes against the original ones.\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``\"bilinear\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. Defaults to ``\"border\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n It also can be a sequence of string, each element corresponds to a key in ``keys``.\n align_corners: Geometrically, we consider the pixels of the input as squares rather than points.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n It also can be a sequence of bool, each element corresponds to a key in ``keys``.\n dtype: data type for resampling computation. 
Defaults to ``np.float64`` for best precision.\n If None, use the data type of input data. To be compatible with other modules,\n the output data type is always ``np.float32``.\n It also can be a sequence of dtypes, each element corresponds to a key in ``keys``.\n meta_key_postfix: use `key_{postfix}` to to fetch the meta data according to the key data,\n default is `meta_dict`, the meta data is a dictionary object.\n For example, to handle key `image`, read/write affine matrices from the\n metadata `image_meta_dict` dictionary's `affine` field.\n allow_missing_keys: don't raise exception if key is missing.\n\n Raises:\n TypeError: When ``meta_key_postfix`` is not a ``str``.\n\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.spacing_transform = Spacing(pixdim, diagonal=diagonal)\n self.mode = ensure_tuple_rep(mode, len(self.keys))\n self.padding_mode = ensure_tuple_rep(padding_mode, len(self.keys))\n self.align_corners = ensure_tuple_rep(align_corners, len(self.keys))\n self.dtype = ensure_tuple_rep(dtype, len(self.keys))\n if not isinstance(meta_key_postfix, str):\n raise TypeError(f\"meta_key_postfix must be a str but is {type(meta_key_postfix).__name__}.\")\n self.meta_key_postfix = meta_key_postfix", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Orientationd_Orientationd.__init__.self.meta_key_postfix.meta_key_postfix": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Orientationd_Orientationd.__init__.self.meta_key_postfix.meta_key_postfix", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 264, "end_line": 312, "span_ids": ["Orientationd.__init__", "Orientationd"], "tokens": 584}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Orientationd(MapTransform, InvertibleTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.Orientation`.\n\n This transform assumes the ``data`` dictionary has a key for the input\n data's metadata and contains `affine` field. The key is formed by ``key_{meta_key_postfix}``.\n\n After reorienting the input array, this transform will write the new affine\n to the `affine` field of metadata which is formed by ``key_{meta_key_postfix}``.\n \"\"\"\n\n def __init__(\n self,\n keys: KeysCollection,\n axcodes: Optional[str] = None,\n as_closest_canonical: bool = False,\n labels: Optional[Sequence[Tuple[str, str]]] = tuple(zip(\"LPI\", \"RAS\")),\n meta_key_postfix: str = \"meta_dict\",\n allow_missing_keys: bool = False,\n ) -> None:\n \"\"\"\n Args:\n axcodes: N elements sequence for spatial ND input's orientation.\n e.g. 
axcodes='RAS' represents 3D orientation:\n (Left, Right), (Posterior, Anterior), (Inferior, Superior).\n default orientation labels options are: 'L' and 'R' for the first dimension,\n 'P' and 'A' for the second, 'I' and 'S' for the third.\n as_closest_canonical: if True, load the image as closest to canonical axis format.\n labels: optional, None or sequence of (2,) sequences\n (2,) sequences are labels for (beginning, end) of output axis.\n Defaults to ``(('L', 'R'), ('P', 'A'), ('I', 'S'))``.\n meta_key_postfix: use `key_{postfix}` to to fetch the meta data according to the key data,\n default is `meta_dict`, the meta data is a dictionary object.\n For example, to handle key `image`, read/write affine matrices from the\n metadata `image_meta_dict` dictionary's `affine` field.\n allow_missing_keys: don't raise exception if key is missing.\n\n Raises:\n TypeError: When ``meta_key_postfix`` is not a ``str``.\n\n See Also:\n `nibabel.orientations.ornt2axcodes`.\n\n \"\"\"\n super().__init__(keys, allow_missing_keys)\n self.ornt_transform = Orientation(axcodes=axcodes, as_closest_canonical=as_closest_canonical, labels=labels)\n if not isinstance(meta_key_postfix, str):\n raise TypeError(f\"meta_key_postfix must be a str but is {type(meta_key_postfix).__name__}.\")\n self.meta_key_postfix = meta_key_postfix", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Orientationd.__call___Orientationd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/spatial/dictionary.py_Orientationd.__call___Orientationd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/spatial/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 314, "end_line": 324, "span_ids": ["Orientationd.__call__"], "tokens": 174}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Orientationd(MapTransform, InvertibleTransform):\n\n def __call__(\n self, data: Mapping[Union[Hashable, str], Dict[str, np.ndarray]]\n ) -> Dict[Union[Hashable, str], Union[np.ndarray, Dict[str, np.ndarray]]]:\n d: Dict = dict(data)\n for key in self.key_iterator(d):\n meta_data_key = f\"{key}_{self.meta_key_postfix}\"\n meta_data = d[meta_data_key]\n d[key], old_affine, new_affine = self.ornt_transform(d[key], affine=meta_data[\"affine\"])\n self.push_transform(d, key, extra_info={\"meta_data_key\": meta_data_key, \"old_affine\": old_affine})\n d[meta_data_key][\"affine\"] = new_affine\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_logging_Identity.__call__.return.np_asanyarray_img_": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_logging_Identity.__call__.return.np_asanyarray_img_", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 16, "end_line": 70, "span_ids": ["Identity.__call__", "Identity", "docstring"], "tokens": 371}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import logging\nimport sys\nimport time\nfrom typing import Callable, Dict, List, Optional, Sequence, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom monai.config import DtypeLike, NdarrayTensor\nfrom monai.transforms.transform import Randomizable, Transform\nfrom monai.transforms.utils import extreme_points_to_image, get_extreme_points, map_binary_to_indices\nfrom monai.utils import ensure_tuple, min_version, optional_import\n\nPILImageImage, has_pil = optional_import(\"PIL.Image\", name=\"Image\")\npil_image_fromarray, _ = optional_import(\"PIL.Image\", name=\"fromarray\")\n\n__all__ = [\n \"Identity\",\n \"AsChannelFirst\",\n \"AsChannelLast\",\n \"AddChannel\",\n \"EnsureChannelFirst\",\n \"RepeatChannel\",\n \"RemoveRepeatedChannel\",\n \"SplitChannel\",\n \"CastToType\",\n \"ToTensor\",\n \"ToNumpy\",\n \"Transpose\",\n \"SqueezeDim\",\n \"DataStats\",\n \"SimulateDelay\",\n \"Lambda\",\n \"LabelToMask\",\n \"FgBgToIndices\",\n \"ConvertToMultiChannelBasedOnBratsClasses\",\n \"AddExtremePointsChannel\",\n \"TorchVision\",\n \"MapLabelValue\",\n]\n\n\nclass Identity(Transform):\n \"\"\"\n Convert the input to an np.ndarray, if input data is np.ndarray or subclasses, return unchanged data.\n As the output value is same as input, it can be used as a testing tool to verify the transform chain,\n Compose or transform adaptor, etc.\n\n \"\"\"\n\n def __call__(self, img: Union[np.ndarray, torch.Tensor]) -> np.ndarray:\n \"\"\"\n Apply the transform to `img`.\n \"\"\"\n return np.asanyarray(img)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_Lambda_Lambda.__init__.self.func.func": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_Lambda_Lambda.__init__.self.func.func", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 358, "end_line": 383, "span_ids": ["Lambda", "Lambda.__init__"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Lambda(Transform):\n \"\"\"\n Apply a user-defined lambda as a transform.\n\n 
For example:\n\n .. code-block:: python\n :emphasize-lines: 2\n\n image = np.ones((10, 2, 2))\n lambd = Lambda(func=lambda x: x[:4, :, :])\n print(lambd(image).shape)\n (4, 2, 2)\n\n Args:\n func: Lambda/function to be applied.\n\n Raises:\n TypeError: When ``func`` is not an ``Optional[Callable]``.\n\n \"\"\"\n\n def __init__(self, func: Optional[Callable] = None) -> None:\n if func is not None and not callable(func):\n raise TypeError(f\"func must be None or callable but is {type(func).__name__}.\")\n self.func = func", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_copy_from_monai_utils_import_e": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_copy_from_monai_utils_import_e", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 18, "end_line": 53, "span_ids": ["docstring"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import copy\nimport logging\nfrom copy import deepcopy\nfrom typing import Any, Callable, Dict, Hashable, List, Mapping, Optional, Sequence, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom monai.config import DtypeLike, KeysCollection, NdarrayTensor\nfrom monai.transforms.inverse import InvertibleTransform\nfrom monai.transforms.transform import MapTransform, Randomizable, RandomizableTransform\nfrom monai.transforms.utility.array import (\n AddChannel,\n AsChannelFirst,\n AsChannelLast,\n CastToType,\n ConvertToMultiChannelBasedOnBratsClasses,\n DataStats,\n EnsureChannelFirst,\n FgBgToIndices,\n Identity,\n LabelToMask,\n Lambda,\n MapLabelValue,\n RemoveRepeatedChannel,\n RepeatChannel,\n SimulateDelay,\n SplitChannel,\n SqueezeDim,\n ToNumpy,\n ToPIL,\n TorchVision,\n ToTensor,\n)\nfrom monai.transforms.utils import extreme_points_to_image, get_extreme_points\nfrom monai.utils import ensure_tuple, ensure_tuple_rep", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_rotate_create_rotate.raise_ValueError_f_Unsupp": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_create_rotate_create_rotate.raise_ValueError_f_Unsupp", "embedding": null, "metadata": {"file_path": "monai/transforms/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 420, "end_line": 467, "span_ids": ["create_rotate"], "tokens": 652}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def create_rotate(spatial_dims: int, radians: Union[Sequence[float], float]) -> np.ndarray:\n \"\"\"\n create a 2D or 3D rotation matrix\n\n Args:\n spatial_dims: {``2``, ``3``} spatial rank\n radians: rotation radians\n when spatial_dims == 3, the `radians` sequence corresponds to\n rotation in the 1st, 2nd, and 3rd dim respectively.\n\n Raises:\n ValueError: When ``radians`` is empty.\n ValueError: When ``spatial_dims`` is not one of [2, 3].\n\n \"\"\"\n radians = ensure_tuple(radians)\n if spatial_dims == 2:\n if len(radians) >= 1:\n sin_, cos_ = np.sin(radians[0]), np.cos(radians[0])\n return np.array([[cos_, -sin_, 0.0], [sin_, cos_, 0.0], [0.0, 0.0, 1.0]])\n raise ValueError(\"radians must be non empty.\")\n\n if spatial_dims == 3:\n affine = None\n if len(radians) >= 1:\n sin_, cos_ = np.sin(radians[0]), np.cos(radians[0])\n affine = np.array(\n [[1.0, 0.0, 0.0, 0.0], [0.0, cos_, -sin_, 0.0], [0.0, sin_, cos_, 0.0], [0.0, 0.0, 0.0, 1.0]]\n )\n if len(radians) >= 2:\n sin_, cos_ = np.sin(radians[1]), np.cos(radians[1])\n if affine is None:\n raise ValueError(\"Affine should be a matrix.\")\n affine = affine @ np.array(\n [[cos_, 0.0, sin_, 0.0], [0.0, 1.0, 0.0, 0.0], [-sin_, 0.0, cos_, 0.0], [0.0, 0.0, 0.0, 1.0]]\n )\n if len(radians) >= 3:\n sin_, cos_ = np.sin(radians[2]), np.cos(radians[2])\n if affine is None:\n raise ValueError(\"Affine should be a matrix.\")\n affine = affine @ np.array(\n [[cos_, -sin_, 0.0, 0.0], [sin_, cos_, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]\n )\n if affine is None:\n raise ValueError(\"radians must be non empty.\")\n return affine\n\n raise ValueError(f\"Unsupported spatial_dims: {spatial_dims}, available options are [2, 3].\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/aliases.py_importlib_alias.return._outer": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/aliases.py_importlib_alias.return._outer", "embedding": null, "metadata": {"file_path": "monai/utils/aliases.py", "file_name": "aliases.py", "file_type": "text/x-python", "category": "implementation", "start_line": 16, "end_line": 43, "span_ids": ["alias", "docstring"], "tokens": 162}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import importlib\nimport inspect\nimport sys\nimport threading\n\nalias_lock = threading.RLock()\nGlobalAliases = {}\n\n__all__ = [\"alias\", \"resolve_name\"]\n\n\ndef alias(*names):\n \"\"\"\n Stores the decorated function or class in the global aliases table under the given names and as the `__aliases__`\n member of the decorated object. 
This new member will contain all alias names declared for that object.\n \"\"\"\n\n def _outer(obj):\n for n in names:\n with alias_lock:\n GlobalAliases[n] = obj\n\n # set the member list __aliases__ to contain the alias names defined by the decorator for `obj`\n obj.__aliases__ = getattr(obj, \"__aliases__\", ()) + tuple(names)\n\n return obj\n\n return _outer", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_collections.abc_ensure_tuple_size.return.tuple_tup_dim_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_collections.abc_ensure_tuple_size.return.tuple_tup_dim_", "embedding": null, "metadata": {"file_path": "monai/utils/misc.py", "file_name": "misc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 100, "span_ids": ["zip_with", "ensure_tuple_size", "first", "docstring", "ensure_tuple", "star_zip_with", "issequenceiterable"], "tokens": 604}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import collections.abc\nimport inspect\nimport itertools\nimport random\nimport types\nimport warnings\nfrom ast import literal_eval\nfrom distutils.util import strtobool\nfrom typing import Any, Callable, Optional, Sequence, Tuple, Union, cast\n\nimport numpy as np\nimport torch\n\n__all__ = [\n \"zip_with\",\n \"star_zip_with\",\n \"first\",\n \"issequenceiterable\",\n \"ensure_tuple\",\n \"ensure_tuple_size\",\n \"ensure_tuple_rep\",\n \"fall_back_tuple\",\n \"is_scalar_tensor\",\n \"is_scalar\",\n \"progress_bar\",\n \"get_seed\",\n \"set_determinism\",\n \"list_to_dict\",\n \"dtype_torch_to_numpy\",\n \"dtype_numpy_to_torch\",\n \"MAX_SEED\",\n \"copy_to_device\",\n \"ImageMetaKey\",\n]\n\n_seed = None\n_flag_deterministic = torch.backends.cudnn.deterministic\n_flag_cudnn_benchmark = torch.backends.cudnn.benchmark\nMAX_SEED = np.iinfo(np.uint32).max + 1 # 2**32, the actual seed should be in [0, MAX_SEED - 1] for uint32\n\n\ndef zip_with(op, *vals, mapfunc=map):\n \"\"\"\n Map `op`, using `mapfunc`, to each tuple derived from zipping the iterables in `vals`.\n \"\"\"\n return mapfunc(op, zip(*vals))\n\n\ndef star_zip_with(op, *vals):\n \"\"\"\n Use starmap as the mapping function in zipWith.\n \"\"\"\n return zip_with(op, *vals, mapfunc=itertools.starmap)\n\n\ndef first(iterable, default=None):\n \"\"\"\n Returns the first item in the given iterable or `default` if empty, meaningful mostly with 'for' expressions.\n \"\"\"\n for i in iterable:\n return i\n return default\n\n\ndef issequenceiterable(obj: Any) -> bool:\n \"\"\"\n Determine if the object is an iterable sequence and is not a string.\n \"\"\"\n if isinstance(obj, torch.Tensor):\n return int(obj.dim()) > 0 # a 0-d tensor is not iterable\n return isinstance(obj, collections.abc.Iterable) and not isinstance(obj, str)\n\n\ndef ensure_tuple(vals: Any) -> Tuple[Any, ...]:\n \"\"\"\n Returns a tuple of `vals`.\n \"\"\"\n if not issequenceiterable(vals):\n vals = 
(vals,)\n\n return tuple(vals)\n\n\ndef ensure_tuple_size(tup: Any, dim: int, pad_val: Any = 0) -> Tuple[Any, ...]:\n \"\"\"\n Returns a copy of `tup` with `dim` values by either shortened or padded with `pad_val` as necessary.\n \"\"\"\n tup = ensure_tuple(tup) + (pad_val,) * dim\n return tuple(tup[:dim])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_ensure_tuple_rep_ensure_tuple_rep.raise_ValueError_f_Sequen": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_ensure_tuple_rep_ensure_tuple_rep.raise_ValueError_f_Sequen", "embedding": null, "metadata": {"file_path": "monai/utils/misc.py", "file_name": "misc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 103, "end_line": 131, "span_ids": ["ensure_tuple_rep"], "tokens": 264}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def ensure_tuple_rep(tup: Any, dim: int) -> Tuple[Any, ...]:\n \"\"\"\n Returns a copy of `tup` with `dim` values by either shortened or duplicated input.\n\n Raises:\n ValueError: When ``tup`` is a sequence and ``tup`` length is not ``dim``.\n\n Examples::\n\n >>> ensure_tuple_rep(1, 3)\n (1, 1, 1)\n >>> ensure_tuple_rep(None, 3)\n (None, None, None)\n >>> ensure_tuple_rep('test', 3)\n ('test', 'test', 'test')\n >>> ensure_tuple_rep([1, 2, 3], 3)\n (1, 2, 3)\n >>> ensure_tuple_rep(range(3), 3)\n (0, 1, 2)\n >>> ensure_tuple_rep([1, 2], 3)\n ValueError: Sequence must have length 3, got length 2.\n\n \"\"\"\n if not issequenceiterable(tup):\n return (tup,) * dim\n if len(tup) == dim:\n return tuple(tup)\n\n raise ValueError(f\"Sequence must have length {dim}, got {len(tup)}.\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py__image3_animated_gif__image3_animated_gif.return.Summary_value_image_summ": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/visualize/img2tensorboard.py__image3_animated_gif__image3_animated_gif.return.Summary_value_image_summ", "embedding": null, "metadata": {"file_path": "monai/visualize/img2tensorboard.py", "file_name": "img2tensorboard.py", "file_type": "text/x-python", "category": "implementation", "start_line": 34, "end_line": 58, "span_ids": ["_image3_animated_gif"], "tokens": 363}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _image3_animated_gif(tag: str, image: Union[np.ndarray, torch.Tensor], 
scale_factor: float = 1.0) -> Summary:\n \"\"\"Function to actually create the animated gif.\n\n Args:\n tag: Data identifier\n image: 3D image tensors expected to be in `HWD` format\n scale_factor: amount to multiply values by. if the image data is between 0 and 1, using 255 for this value will\n scale it to displayable range\n \"\"\"\n if len(image.shape) != 3:\n raise AssertionError(\"3D image tensors expected to be in `HWD` format, len(image.shape) != 3\")\n\n ims = [(np.asarray((image[:, :, i])) * scale_factor).astype(np.uint8) for i in range(image.shape[2])]\n ims = [GifImage.fromarray(im) for im in ims]\n img_str = b\"\"\n for b_data in PIL.GifImagePlugin.getheader(ims[0])[0]:\n img_str += b_data\n img_str += b\"\\x21\\xFF\\x0B\\x4E\\x45\\x54\\x53\\x43\\x41\\x50\" b\"\\x45\\x32\\x2E\\x30\\x03\\x01\\x00\\x00\\x00\"\n for i in ims:\n for b_data in PIL.GifImagePlugin.getdata(i):\n img_str += b_data\n img_str += b\"\\x3B\"\n summary_image_str = Summary.Image(height=10, width=10, colorspace=1, encoded_image_string=img_str)\n image_summary = Summary.Value(tag=tag, image=summary_image_str)\n return Summary(value=[image_summary])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adaptors.py_itertools_TestAdaptors.test_single_in_single_out.None_4": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_adaptors.py_itertools_TestAdaptors.test_single_in_single_out.None_4", "embedding": null, "metadata": {"file_path": "tests/test_adaptors.py", "file_name": "test_adaptors.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 53, "span_ids": ["TestAdaptors.test_function_signature", "TestAdaptors.test_single_in_single_out", "TestAdaptors", "docstring"], "tokens": 338}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import itertools\nimport unittest\n\nfrom monai.transforms.adaptors import FunctionSignature, adaptor, apply_alias, to_kwargs\n\n\nclass TestAdaptors(unittest.TestCase):\n def test_function_signature(self):\n def foo(image, label=None, *a, **kw):\n pass\n\n f = FunctionSignature(foo)\n\n def test_single_in_single_out(self):\n def foo(image):\n return image * 2\n\n it = itertools.product([\"image\", [\"image\"]], [None, \"image\", [\"image\"], {\"image\": \"image\"}])\n for i in it:\n d = {\"image\": 2}\n dres = adaptor(foo, i[0], i[1])(d)\n self.assertEqual(dres[\"image\"], 4)\n\n d = {\"image\": 2}\n dres = adaptor(foo, \"image\")(d)\n self.assertEqual(dres[\"image\"], 4)\n\n d = {\"image\": 2}\n dres = adaptor(foo, \"image\", \"image\")(d)\n self.assertEqual(dres[\"image\"], 4)\n\n d = {\"image\": 2}\n dres = adaptor(foo, \"image\", {\"image\": \"image\"})(d)\n self.assertEqual(dres[\"image\"], 4)\n\n d = {\"img\": 2}\n dres = adaptor(foo, \"img\", {\"img\": \"image\"})(d)\n self.assertEqual(dres[\"img\"], 4)\n\n d = {\"img\": 2}\n dres = adaptor(foo, [\"img\"], {\"img\": \"image\"})(d)\n self.assertEqual(dres[\"img\"], 4)", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestToNormAffine.test_to_norm_affine_ill_TestToNormAffine.test_to_norm_affine_ill.with_self_assertRaises_Va.to_norm_affine_affine_sr": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_affine_transform.py_TestToNormAffine.test_to_norm_affine_ill_TestToNormAffine.test_to_norm_affine_ill.with_self_assertRaises_Va.to_norm_affine_affine_sr", "embedding": null, "metadata": {"file_path": "tests/test_affine_transform.py", "file_name": "test_affine_transform.py", "file_type": "text/x-python", "category": "test", "start_line": 100, "end_line": 106, "span_ids": ["TestToNormAffine.test_to_norm_affine_ill"], "tokens": 119}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestToNormAffine(unittest.TestCase):\n\n @parameterized.expand(TEST_ILL_TO_NORM_AFFINE_CASES)\n def test_to_norm_affine_ill(self, affine, src_size, dst_size, align_corners):\n with self.assertRaises(TypeError):\n to_norm_affine(affine, src_size, dst_size, align_corners)\n with self.assertRaises(ValueError):\n affine = torch.as_tensor(affine, device=torch.device(\"cpu:0\"), dtype=torch.float32)\n to_norm_affine(affine, src_size, dst_size, align_corners)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_TestArrayDataset_TestArrayDataset.test_shape.with_tempfile_TemporaryDi.None_13": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_TestArrayDataset_TestArrayDataset.test_shape.with_tempfile_TemporaryDi.None_13", "embedding": null, "metadata": {"file_path": "tests/test_arraydataset.py", "file_name": "test_arraydataset.py", "file_type": "text/x-python", "category": "test", "start_line": 57, "end_line": 90, "span_ids": ["TestArrayDataset", "TestArrayDataset.test_shape"], "tokens": 480}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestArrayDataset(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_shape(self, img_transform, label_transform, indices, expected_shape):\n test_image = nib.Nifti1Image(np.random.randint(0, 2, size=(128, 128, 128)), np.eye(4))\n with tempfile.TemporaryDirectory() as tempdir:\n test_image1 = os.path.join(tempdir, \"test_image1.nii.gz\")\n test_seg1 = os.path.join(tempdir, \"test_seg1.nii.gz\")\n test_image2 = os.path.join(tempdir, \"test_image2.nii.gz\")\n test_seg2 = 
os.path.join(tempdir, \"test_seg2.nii.gz\")\n nib.save(test_image, test_image1)\n nib.save(test_image, test_seg1)\n nib.save(test_image, test_image2)\n nib.save(test_image, test_seg2)\n test_images = [test_image1, test_image2]\n test_segs = [test_seg1, test_seg2]\n test_labels = [1, 1]\n dataset = ArrayDataset(test_images, img_transform, test_segs, label_transform, test_labels, None)\n self.assertEqual(len(dataset), 2)\n dataset.set_random_state(1234)\n data1 = dataset[0]\n data2 = dataset[1]\n\n self.assertTupleEqual(data1[indices[0]].shape, expected_shape)\n self.assertTupleEqual(data1[indices[1]].shape, expected_shape)\n np.testing.assert_allclose(data1[indices[0]], data1[indices[1]])\n self.assertTupleEqual(data2[indices[0]].shape, expected_shape)\n self.assertTupleEqual(data2[indices[1]].shape, expected_shape)\n np.testing.assert_allclose(data2[indices[0]], data2[indices[0]])\n\n dataset = ArrayDataset(test_images, img_transform, test_segs, label_transform, test_labels, None)\n dataset.set_random_state(1234)\n _ = dataset[0]\n data2_new = dataset[1]\n np.testing.assert_allclose(data2[indices[0]], data2_new[indices[0]], atol=1e-3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_TestArrayDataset.test_default_none_TestArrayDataset.test_default_none.with_tempfile_TemporaryDi.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_TestArrayDataset.test_default_none_TestArrayDataset.test_default_none.with_tempfile_TemporaryDi.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_arraydataset.py", "file_name": "test_arraydataset.py", "file_type": "text/x-python", "category": "test", "start_line": 92, "end_line": 113, "span_ids": ["TestArrayDataset.test_default_none"], "tokens": 270}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestArrayDataset(unittest.TestCase):\n\n @parameterized.expand([TEST_CASE_4])\n def test_default_none(self, img_transform, expected_shape):\n test_image = nib.Nifti1Image(np.random.randint(0, 2, size=(128, 128, 128)), np.eye(4))\n with tempfile.TemporaryDirectory() as tempdir:\n test_image1 = os.path.join(tempdir, \"test_image1.nii.gz\")\n test_image2 = os.path.join(tempdir, \"test_image2.nii.gz\")\n nib.save(test_image, test_image1)\n nib.save(test_image, test_image2)\n test_images = [test_image1, test_image2]\n dataset = ArrayDataset(test_images, img_transform)\n self.assertEqual(len(dataset), 2)\n dataset.set_random_state(1234)\n data1 = dataset[0]\n data2 = dataset[1]\n self.assertTupleEqual(data1.shape, expected_shape)\n self.assertTupleEqual(data2.shape, expected_shape)\n\n dataset = ArrayDataset(test_images, img_transform)\n dataset.set_random_state(1234)\n _ = dataset[0]\n data2_new = dataset[1]\n np.testing.assert_allclose(data2, data2_new, atol=1e-3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", 
"metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_TestArrayDataset.test_dataloading_img_TestArrayDataset.test_dataloading_img.with_tempfile_TemporaryDi.None_6": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_TestArrayDataset.test_dataloading_img_TestArrayDataset.test_dataloading_img.with_tempfile_TemporaryDi.None_6", "embedding": null, "metadata": {"file_path": "tests/test_arraydataset.py", "file_name": "test_arraydataset.py", "file_type": "text/x-python", "category": "test", "start_line": 116, "end_line": 135, "span_ids": ["TestArrayDataset.test_dataloading_img"], "tokens": 282}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestArrayDataset(unittest.TestCase):\n\n @parameterized.expand([TEST_CASE_4])\n def test_dataloading_img(self, img_transform, expected_shape):\n test_image = nib.Nifti1Image(np.random.randint(0, 2, size=(128, 128, 128)), np.eye(4))\n with tempfile.TemporaryDirectory() as tempdir:\n test_image1 = os.path.join(tempdir, \"test_image1.nii.gz\")\n test_image2 = os.path.join(tempdir, \"test_image2.nii.gz\")\n nib.save(test_image, test_image1)\n nib.save(test_image, test_image2)\n test_images = [test_image1, test_image2]\n dataset = ArrayDataset(test_images, img_transform)\n self.assertEqual(len(dataset), 2)\n dataset.set_random_state(1234)\n n_workers = 0 if sys.platform == \"win32\" else 2\n loader = DataLoader(dataset, batch_size=10, num_workers=n_workers)\n imgs = next(iter(loader)) # test batching\n np.testing.assert_allclose(imgs.shape, [2] + list(expected_shape))\n\n dataset.set_random_state(1234)\n new_imgs = next(iter(loader)) # test batching\n np.testing.assert_allclose(imgs, new_imgs, atol=1e-3)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_TestArrayDataset.test_dataloading_img_label_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_arraydataset.py_TestArrayDataset.test_dataloading_img_label_", "embedding": null, "metadata": {"file_path": "tests/test_arraydataset.py", "file_name": "test_arraydataset.py", "file_type": "text/x-python", "category": "test", "start_line": 137, "end_line": 166, "span_ids": ["impl:9", "TestArrayDataset.test_dataloading_img_label"], "tokens": 375}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestArrayDataset(unittest.TestCase):\n\n @parameterized.expand([TEST_CASE_4])\n def test_dataloading_img_label(self, img_transform, expected_shape):\n 
test_image = nib.Nifti1Image(np.random.randint(0, 2, size=(128, 128, 128)), np.eye(4))\n with tempfile.TemporaryDirectory() as tempdir:\n test_image1 = os.path.join(tempdir, \"test_image1.nii.gz\")\n test_image2 = os.path.join(tempdir, \"test_image2.nii.gz\")\n test_label1 = os.path.join(tempdir, \"test_label1.nii.gz\")\n test_label2 = os.path.join(tempdir, \"test_label2.nii.gz\")\n nib.save(test_image, test_image1)\n nib.save(test_image, test_image2)\n nib.save(test_image, test_label1)\n nib.save(test_image, test_label2)\n test_images = [test_image1, test_image2]\n test_labels = [test_label1, test_label2]\n dataset = ArrayDataset(test_images, img_transform, test_labels, img_transform)\n self.assertEqual(len(dataset), 2)\n dataset.set_random_state(1234)\n n_workers = 0 if sys.platform == \"win32\" else 2\n loader = DataLoader(dataset, batch_size=10, num_workers=n_workers)\n data = next(iter(loader)) # test batching\n np.testing.assert_allclose(data[0].shape, [2] + list(expected_shape))\n\n dataset.set_random_state(1234)\n new_data = next(iter(loader)) # test batching\n np.testing.assert_allclose(data[0], new_data[0], atol=1e-3)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cachedataset.py_os_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_cachedataset.py_os_", "embedding": null, "metadata": {"file_path": "tests/test_cachedataset.py", "file_name": "test_cachedataset.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 75, "span_ids": ["impl:5", "TestCacheDataset.test_shape", "TestCacheDataset", "docstring"], "tokens": 666}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport tempfile\nimport unittest\n\nimport nibabel as nib\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.data import CacheDataset\nfrom monai.transforms import Compose, LoadImaged\n\nTEST_CASE_1 = [Compose([LoadImaged(keys=[\"image\", \"label\", \"extra\"])]), (128, 128, 128)]\n\nTEST_CASE_2 = [None, (128, 128, 128)]\n\n\nclass TestCacheDataset(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2])\n def test_shape(self, transform, expected_shape):\n test_image = nib.Nifti1Image(np.random.randint(0, 2, size=[128, 128, 128]), np.eye(4))\n with tempfile.TemporaryDirectory() as tempdir:\n nib.save(test_image, os.path.join(tempdir, \"test_image1.nii.gz\"))\n nib.save(test_image, os.path.join(tempdir, \"test_label1.nii.gz\"))\n nib.save(test_image, os.path.join(tempdir, \"test_extra1.nii.gz\"))\n nib.save(test_image, os.path.join(tempdir, \"test_image2.nii.gz\"))\n nib.save(test_image, os.path.join(tempdir, \"test_label2.nii.gz\"))\n nib.save(test_image, os.path.join(tempdir, \"test_extra2.nii.gz\"))\n test_data = [\n {\n \"image\": os.path.join(tempdir, \"test_image1.nii.gz\"),\n \"label\": os.path.join(tempdir, \"test_label1.nii.gz\"),\n \"extra\": os.path.join(tempdir, 
\"test_extra1.nii.gz\"),\n },\n {\n \"image\": os.path.join(tempdir, \"test_image2.nii.gz\"),\n \"label\": os.path.join(tempdir, \"test_label2.nii.gz\"),\n \"extra\": os.path.join(tempdir, \"test_extra2.nii.gz\"),\n },\n ]\n dataset = CacheDataset(data=test_data, transform=transform, cache_rate=0.5)\n data1 = dataset[0]\n data2 = dataset[1]\n data3 = dataset[0:-1]\n data4 = dataset[-1]\n self.assertEqual(len(data3), 1)\n\n if transform is None:\n self.assertEqual(data1[\"image\"], os.path.join(tempdir, \"test_image1.nii.gz\"))\n self.assertEqual(data2[\"label\"], os.path.join(tempdir, \"test_label2.nii.gz\"))\n self.assertEqual(data4[\"image\"], os.path.join(tempdir, \"test_image2.nii.gz\"))\n else:\n self.assertTupleEqual(data1[\"image\"].shape, expected_shape)\n self.assertTupleEqual(data1[\"label\"].shape, expected_shape)\n self.assertTupleEqual(data1[\"extra\"].shape, expected_shape)\n self.assertTupleEqual(data2[\"image\"].shape, expected_shape)\n self.assertTupleEqual(data2[\"label\"].shape, expected_shape)\n self.assertTupleEqual(data2[\"extra\"].shape, expected_shape)\n for d in data3:\n self.assertTupleEqual(d[\"image\"].shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_roc_auc.py_unittest_TEST_CASE_7._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compute_roc_auc.py_unittest_TEST_CASE_7._", "embedding": null, "metadata": {"file_path": "tests/test_compute_roc_auc.py", "file_name": "test_compute_roc_auc.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 82, "span_ids": ["impl:11", "docstring"], "tokens": 620}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.metrics import compute_roc_auc\nfrom monai.transforms import Activations, AsDiscrete\n\nTEST_CASE_1 = [\n torch.tensor([[0.1, 0.9], [0.3, 1.4], [0.2, 0.1], [0.1, 0.5]]),\n torch.tensor([[0], [1], [0], [1]]),\n True,\n True,\n \"macro\",\n 0.75,\n]\n\nTEST_CASE_2 = [\n torch.tensor([[0.5], [0.5], [0.2], [8.3]]),\n torch.tensor([[0], [1], [0], [1]]),\n False,\n False,\n \"macro\",\n 0.875,\n]\n\nTEST_CASE_3 = [\n torch.tensor([[0.5], [0.5], [0.2], [8.3]]),\n torch.tensor([0, 1, 0, 1]),\n False,\n False,\n \"macro\",\n 0.875,\n]\n\nTEST_CASE_4 = [\n torch.tensor([0.5, 0.5, 0.2, 8.3]),\n torch.tensor([0, 1, 0, 1]),\n False,\n False,\n \"macro\",\n 0.875,\n]\n\nTEST_CASE_5 = [\n torch.tensor([[0.1, 0.9], [0.3, 1.4], [0.2, 0.1], [0.1, 0.5]]),\n torch.tensor([[0], [1], [0], [1]]),\n True,\n True,\n \"none\",\n [0.75, 0.75],\n]\n\nTEST_CASE_6 = [\n torch.tensor([[0.1, 0.9], [0.3, 1.4], [0.2, 0.1], [0.1, 0.5], [0.1, 0.5]]),\n torch.tensor([[1, 0], [0, 1], [0, 0], [1, 1], [0, 1]]),\n True,\n False,\n \"weighted\",\n 0.56667,\n]\n\nTEST_CASE_7 = [\n torch.tensor([[0.1, 0.9], [0.3, 1.4], [0.2, 0.1], [0.1, 
0.5], [0.1, 0.5]]),\n torch.tensor([[1, 0], [0, 1], [0, 0], [1, 1], [0, 1]]),\n True,\n False,\n \"micro\",\n 0.62,\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_csv_saver.py_csv_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_csv_saver.py_csv_", "embedding": null, "metadata": {"file_path": "tests/test_csv_saver.py", "file_name": "test_csv_saver.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 44, "span_ids": ["TestCSVSaver.test_saved_content", "TestCSVSaver", "impl", "docstring"], "tokens": 225}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import csv\nimport os\nimport tempfile\nimport unittest\n\nimport numpy as np\nimport torch\n\nfrom monai.data import CSVSaver\n\n\nclass TestCSVSaver(unittest.TestCase):\n def test_saved_content(self):\n with tempfile.TemporaryDirectory() as tempdir:\n saver = CSVSaver(output_dir=tempdir, filename=\"predictions.csv\")\n meta_data = {\"filename_or_obj\": [\"testfile\" + str(i) for i in range(8)]}\n saver.save_batch(torch.zeros(8), meta_data)\n saver.finalize()\n filepath = os.path.join(tempdir, \"predictions.csv\")\n self.assertTrue(os.path.exists(filepath))\n with open(filepath, \"r\") as f:\n reader = csv.reader(f)\n i = 0\n for row in reader:\n self.assertEqual(row[0], \"testfile\" + str(i))\n self.assertEqual(np.array(row[1:]).astype(np.float32), 0.0)\n i += 1\n self.assertEqual(i, 8)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_data_statsd.py_logging_TEST_CASE_6._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_data_statsd.py_logging_TEST_CASE_6._", "embedding": null, "metadata": {"file_path": "tests/test_data_statsd.py", "file_name": "test_data_statsd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 114, "span_ids": ["impl:9", "docstring"], "tokens": 757}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import logging\nimport os\nimport tempfile\nimport unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import DataStatsd\n\nTEST_CASE_1 = [\n {\n \"keys\": \"img\",\n \"prefix\": \"test data\",\n \"data_type\": False,\n \"data_shape\": False,\n \"value_range\": False,\n \"data_value\": False,\n \"additional_info\": None,\n 
\"logger_handler\": None,\n },\n {\"img\": np.array([[0, 1], [1, 2]])},\n \"test data statistics:\",\n]\n\nTEST_CASE_2 = [\n {\n \"keys\": \"img\",\n \"prefix\": \"test data\",\n \"data_type\": True,\n \"data_shape\": False,\n \"value_range\": False,\n \"data_value\": False,\n \"additional_info\": None,\n \"logger_handler\": None,\n },\n {\"img\": np.array([[0, 1], [1, 2]])},\n \"test data statistics:\\nType: \",\n]\n\nTEST_CASE_3 = [\n {\n \"keys\": \"img\",\n \"prefix\": \"test data\",\n \"data_type\": True,\n \"data_shape\": True,\n \"value_range\": False,\n \"data_value\": False,\n \"additional_info\": None,\n \"logger_handler\": None,\n },\n {\"img\": np.array([[0, 1], [1, 2]])},\n \"test data statistics:\\nType: \\nShape: (2, 2)\",\n]\n\nTEST_CASE_4 = [\n {\n \"keys\": \"img\",\n \"prefix\": \"test data\",\n \"data_type\": True,\n \"data_shape\": True,\n \"value_range\": True,\n \"data_value\": False,\n \"additional_info\": None,\n \"logger_handler\": None,\n },\n {\"img\": np.array([[0, 1], [1, 2]])},\n \"test data statistics:\\nType: \\nShape: (2, 2)\\nValue range: (0, 2)\",\n]\n\nTEST_CASE_5 = [\n {\n \"keys\": \"img\",\n \"prefix\": \"test data\",\n \"data_type\": True,\n \"data_shape\": True,\n \"value_range\": True,\n \"data_value\": True,\n \"additional_info\": None,\n \"logger_handler\": None,\n },\n {\"img\": np.array([[0, 1], [1, 2]])},\n \"test data statistics:\\nType: \\nShape: (2, 2)\\nValue range: (0, 2)\\nValue: [[0 1]\\n [1 2]]\",\n]\n\nTEST_CASE_6 = [\n {\n \"keys\": \"img\",\n \"prefix\": \"test data\",\n \"data_type\": True,\n \"data_shape\": True,\n \"value_range\": True,\n \"data_value\": True,\n \"additional_info\": np.mean,\n \"logger_handler\": None,\n },\n {\"img\": np.array([[0, 1], [1, 2]])},\n (\n \"test data statistics:\\nType: \\nShape: (2, 2)\\n\"\n \"Value range: (0, 2)\\nValue: [[0 1]\\n [1 2]]\\nAdditional info: 1.0\"\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dataset.py_os_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dataset.py_os_", "embedding": null, "metadata": {"file_path": "tests/test_dataset.py", "file_name": "test_dataset.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 93, "span_ids": ["TestDataset.test_shape", "impl:3", "TestDataset", "docstring"], "tokens": 864}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport tempfile\nimport unittest\n\nimport nibabel as nib\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.data import Dataset\nfrom monai.transforms import Compose, LoadImaged, SimulateDelayd\n\nTEST_CASE_1 = [(128, 128, 128)]\n\n\nclass TestDataset(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1])\n def test_shape(self, expected_shape):\n test_image = nib.Nifti1Image(np.random.randint(0, 2, size=[128, 128, 128]), np.eye(4))\n with tempfile.TemporaryDirectory() as tempdir:\n nib.save(test_image, os.path.join(tempdir, 
\"test_image1.nii.gz\"))\n nib.save(test_image, os.path.join(tempdir, \"test_label1.nii.gz\"))\n nib.save(test_image, os.path.join(tempdir, \"test_extra1.nii.gz\"))\n nib.save(test_image, os.path.join(tempdir, \"test_image2.nii.gz\"))\n nib.save(test_image, os.path.join(tempdir, \"test_label2.nii.gz\"))\n nib.save(test_image, os.path.join(tempdir, \"test_extra2.nii.gz\"))\n test_data = [\n {\n \"image\": os.path.join(tempdir, \"test_image1.nii.gz\"),\n \"label\": os.path.join(tempdir, \"test_label1.nii.gz\"),\n \"extra\": os.path.join(tempdir, \"test_extra1.nii.gz\"),\n },\n {\n \"image\": os.path.join(tempdir, \"test_image2.nii.gz\"),\n \"label\": os.path.join(tempdir, \"test_label2.nii.gz\"),\n \"extra\": os.path.join(tempdir, \"test_extra2.nii.gz\"),\n },\n ]\n test_transform = Compose(\n [\n LoadImaged(keys=[\"image\", \"label\", \"extra\"]),\n SimulateDelayd(keys=[\"image\", \"label\", \"extra\"], delay_time=[1e-7, 1e-6, 1e-5]),\n ]\n )\n dataset = Dataset(data=test_data, transform=test_transform)\n data1 = dataset[0]\n data2 = dataset[1]\n\n self.assertTupleEqual(data1[\"image\"].shape, expected_shape)\n self.assertTupleEqual(data1[\"label\"].shape, expected_shape)\n self.assertTupleEqual(data1[\"extra\"].shape, expected_shape)\n self.assertTupleEqual(data2[\"image\"].shape, expected_shape)\n self.assertTupleEqual(data2[\"label\"].shape, expected_shape)\n self.assertTupleEqual(data2[\"extra\"].shape, expected_shape)\n\n dataset = Dataset(data=test_data, transform=LoadImaged(keys=[\"image\", \"label\", \"extra\"]))\n data1_simple = dataset[0]\n data2_simple = dataset[1]\n data3_simple = dataset[-1]\n data4_simple = dataset[[0, 1]]\n\n self.assertTupleEqual(data1_simple[\"image\"].shape, expected_shape)\n self.assertTupleEqual(data1_simple[\"label\"].shape, expected_shape)\n self.assertTupleEqual(data1_simple[\"extra\"].shape, expected_shape)\n self.assertTupleEqual(data2_simple[\"image\"].shape, expected_shape)\n self.assertTupleEqual(data2_simple[\"label\"].shape, expected_shape)\n self.assertTupleEqual(data2_simple[\"extra\"].shape, expected_shape)\n self.assertTupleEqual(data3_simple[\"image\"].shape, expected_shape)\n self.assertTupleEqual(data3_simple[\"label\"].shape, expected_shape)\n self.assertTupleEqual(data3_simple[\"extra\"].shape, expected_shape)\n self.assertTupleEqual(data4_simple[0][\"image\"].shape, expected_shape)\n self.assertTupleEqual(data4_simple[1][\"label\"].shape, expected_shape)\n self.assertTupleEqual(data4_simple[-1][\"extra\"].shape, expected_shape)\n\n data4_list = dataset[0:1]\n self.assertEqual(len(data4_list), 1)\n for d in data4_list:\n self.assertTupleEqual(d[\"image\"].shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_decathlondataset.py_os_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_decathlondataset.py_os_", "embedding": null, "metadata": {"file_path": "tests/test_decathlondataset.py", "file_name": "test_decathlondataset.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 96, "span_ids": ["TestDecathlonDataset", "TestDecathlonDataset.test_values", "impl", "docstring"], "tokens": 715}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport shutil\nimport unittest\nfrom urllib.error import ContentTooShortError, HTTPError\n\nfrom monai.apps import DecathlonDataset\nfrom monai.transforms import AddChanneld, Compose, LoadImaged, ScaleIntensityd, ToTensord\nfrom tests.utils import skip_if_quick\n\n\nclass TestDecathlonDataset(unittest.TestCase):\n @skip_if_quick\n def test_values(self):\n testing_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), \"testing_data\")\n transform = Compose(\n [\n LoadImaged(keys=[\"image\", \"label\"]),\n AddChanneld(keys=[\"image\", \"label\"]),\n ScaleIntensityd(keys=\"image\"),\n ToTensord(keys=[\"image\", \"label\"]),\n ]\n )\n\n def _test_dataset(dataset):\n self.assertEqual(len(dataset), 52)\n self.assertTrue(\"image\" in dataset[0])\n self.assertTrue(\"label\" in dataset[0])\n self.assertTrue(\"image_meta_dict\" in dataset[0])\n self.assertTupleEqual(dataset[0][\"image\"].shape, (1, 36, 47, 44))\n\n try: # will start downloading if testing_dir doesn't have the Decathlon files\n data = DecathlonDataset(\n root_dir=testing_dir,\n task=\"Task04_Hippocampus\",\n transform=transform,\n section=\"validation\",\n download=True,\n )\n except (ContentTooShortError, HTTPError, RuntimeError) as e:\n print(str(e))\n if isinstance(e, RuntimeError):\n # FIXME: skip MD5 check as current downloading method may fail\n self.assertTrue(str(e).startswith(\"md5 check\"))\n return # skipping this test due the network connection errors\n\n _test_dataset(data)\n data = DecathlonDataset(\n root_dir=testing_dir, task=\"Task04_Hippocampus\", transform=transform, section=\"validation\", download=False\n )\n _test_dataset(data)\n # test validation without transforms\n data = DecathlonDataset(root_dir=testing_dir, task=\"Task04_Hippocampus\", section=\"validation\", download=False)\n self.assertTupleEqual(data[0][\"image\"].shape, (36, 47, 44))\n self.assertEqual(len(data), 52)\n data = DecathlonDataset(root_dir=testing_dir, task=\"Task04_Hippocampus\", section=\"training\", download=False)\n self.assertTupleEqual(data[0][\"image\"].shape, (34, 56, 31))\n self.assertEqual(len(data), 208)\n\n # test dataset properties\n data = DecathlonDataset(\n root_dir=testing_dir,\n task=\"Task04_Hippocampus\",\n section=\"validation\",\n download=False,\n )\n properties = data.get_properties(keys=\"labels\")\n self.assertDictEqual(properties[\"labels\"], {\"0\": \"background\", \"1\": \"Anterior\", \"2\": \"Posterior\"})\n\n shutil.rmtree(os.path.join(testing_dir, \"Task04_Hippocampus\"))\n try:\n data = DecathlonDataset(\n root_dir=testing_dir,\n task=\"Task04_Hippocampus\",\n transform=transform,\n section=\"validation\",\n download=False,\n )\n except RuntimeError as e:\n print(str(e))\n self.assertTrue(str(e).startswith(\"Cannot find dataset directory\"))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_delete_itemsd.py_sys_": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_delete_itemsd.py_sys_", "embedding": null, "metadata": {"file_path": "tests/test_delete_itemsd.py", "file_name": "test_delete_itemsd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 38, "span_ids": ["TestDeleteItemsd", "impl:3", "TestDeleteItemsd.test_memory", "docstring"], "tokens": 176}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\nimport time\nimport unittest\n\nfrom parameterized import parameterized\n\nfrom monai.transforms import DeleteItemsd\n\nTEST_CASE_1 = [{\"keys\": [str(i) for i in range(30)]}, 20]\n\n\nclass TestDeleteItemsd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1])\n def test_memory(self, input_param, expected_key_size):\n input_data = {}\n for i in range(50):\n input_data[str(i)] = [time.time()] * 100000\n result = DeleteItemsd(**input_param)(input_data)\n self.assertEqual(len(result.keys()), expected_key_size)\n self.assertGreaterEqual(\n sys.getsizeof(input_data) * float(expected_key_size) / len(input_data), sys.getsizeof(result)\n )\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ensemble_evaluator.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_ensemble_evaluator.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_ensemble_evaluator.py", "file_name": "test_ensemble_evaluator.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 86, "span_ids": ["TestEnsembleEvaluator", "TestEnsembleEvaluator.test_content", "impl", "docstring"], "tokens": 533}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom ignite.engine import EventEnum, Events\n\nfrom monai.engines import EnsembleEvaluator\n\n\nclass TestEnsembleEvaluator(unittest.TestCase):\n def test_content(self):\n device = torch.device(\"cpu:0\")\n\n class TestDataset(torch.utils.data.Dataset):\n def __len__(self):\n return 8\n\n def __getitem__(self, index):\n return {\"image\": torch.tensor([index]), \"label\": torch.zeros(1)}\n\n val_loader = torch.utils.data.DataLoader(TestDataset())\n\n class TestNet(torch.nn.Module):\n def __init__(self, func):\n super().__init__()\n self.func = func\n\n def forward(self, x):\n return self.func(x)\n\n net0 = TestNet(lambda x: x + 1)\n net1 = TestNet(lambda x: x + 2)\n net2 = TestNet(lambda x: x + 3)\n net3 = TestNet(lambda x: x + 4)\n net4 = TestNet(lambda x: x + 5)\n\n class CustomEvents(EventEnum):\n FOO_EVENT = \"foo_event\"\n BAR_EVENT = \"bar_event\"\n\n val_engine = 
EnsembleEvaluator(\n device=device,\n val_data_loader=val_loader,\n networks=[net0, net1, net2, net3, net4],\n pred_keys=[\"pred0\", \"pred1\", \"pred2\", \"pred3\", \"pred4\"],\n event_names=[\"bwd_event\", \"opt_event\", CustomEvents],\n event_to_attr={CustomEvents.FOO_EVENT: \"foo\", \"opt_event\": \"opt\"},\n )\n\n @val_engine.on(Events.ITERATION_COMPLETED)\n def run_post_transform(engine):\n for i in range(5):\n expected_value = engine.state.iteration + i\n torch.testing.assert_allclose(engine.state.output[f\"pred{i}\"], torch.tensor([[expected_value]]))\n\n @val_engine.on(Events.EPOCH_COMPLETED)\n def trigger_custom_event():\n val_engine.fire_event(CustomEvents.FOO_EVENT)\n val_engine.fire_event(CustomEvents.BAR_EVENT)\n val_engine.fire_event(\"bwd_event\")\n val_engine.fire_event(\"opt_event\")\n\n @val_engine.on(CustomEvents.FOO_EVENT)\n def do_foo_op():\n self.assertEqual(val_engine.state.foo, 0)\n\n @val_engine.on(\"opt_event\")\n def do_bar_op():\n self.assertEqual(val_engine.state.opt, 0)\n\n val_engine.run()\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_sharpen.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_sharpen.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_gaussian_sharpen.py", "file_name": "test_gaussian_sharpen.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 62, "span_ids": ["TestGaussianSharpen", "impl:7", "TestGaussianSharpen.test_value", "impl:5", "docstring"], "tokens": 745}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import GaussianSharpen\n\nTEST_CASE_1 = [\n {},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [[4.1081963, 3.4950666, 4.1081963], [3.7239995, 2.8491793, 3.7239995], [4.569839, 3.9529324, 4.569839]],\n [[10.616725, 9.081067, 10.616725], [9.309998, 7.12295, 9.309998], [11.078365, 9.538931, 11.078365]],\n ]\n ),\n]\n\nTEST_CASE_2 = [\n {\"sigma1\": 1.0, \"sigma2\": 0.75, \"alpha\": 20},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [[4.513644, 4.869134, 4.513644], [8.467242, 9.4004135, 8.467242], [10.416813, 12.0653515, 10.416813]],\n [[15.711488, 17.569994, 15.711488], [21.16811, 23.501041, 21.16811], [21.614658, 24.766209, 21.614658]],\n ]\n ),\n]\n\nTEST_CASE_3 = [\n {\"sigma1\": (0.5, 1.0), \"sigma2\": (0.5, 0.75), \"alpha\": 20},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [[3.3324685, 3.335536, 3.3324673], [7.7666636, 8.16056, 7.7666636], [12.662973, 14.317837, 12.6629715]],\n [[15.329051, 16.57557, 15.329051], [19.41665, 20.40139, 19.416655], [24.659554, 27.557873, 24.659554]],\n ]\n ),\n]\n\n\nclass 
TestGaussianSharpen(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_value(self, argments, image, expected_data):\n result = GaussianSharpen(**argments)(image)\n np.testing.assert_allclose(result, expected_data, rtol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_sharpend.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_sharpend.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_gaussian_sharpend.py", "file_name": "test_gaussian_sharpend.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 62, "span_ids": ["TestGaussianSharpend.test_value", "TestGaussianSharpend", "impl:7", "impl:5", "docstring"], "tokens": 776}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import GaussianSharpend\n\nTEST_CASE_1 = [\n {\"keys\": \"img\"},\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [[4.1081963, 3.4950666, 4.1081963], [3.7239995, 2.8491793, 3.7239995], [4.569839, 3.9529324, 4.569839]],\n [[10.616725, 9.081067, 10.616725], [9.309998, 7.12295, 9.309998], [11.078365, 9.538931, 11.078365]],\n ]\n ),\n]\n\nTEST_CASE_2 = [\n {\"keys\": \"img\", \"sigma1\": 1.0, \"sigma2\": 0.75, \"alpha\": 20},\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [[4.513644, 4.869134, 4.513644], [8.467242, 9.4004135, 8.467242], [10.416813, 12.0653515, 10.416813]],\n [[15.711488, 17.569994, 15.711488], [21.16811, 23.501041, 21.16811], [21.614658, 24.766209, 21.614658]],\n ]\n ),\n]\n\nTEST_CASE_3 = [\n {\"keys\": \"img\", \"sigma1\": (0.5, 1.0), \"sigma2\": (0.5, 0.75), \"alpha\": 20},\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [[3.3324685, 3.335536, 3.3324673], [7.7666636, 8.16056, 7.7666636], [12.662973, 14.317837, 12.6629715]],\n [[15.329051, 16.57557, 15.329051], [19.41665, 20.40139, 19.416655], [24.659554, 27.557873, 24.659554]],\n ]\n ),\n]\n\n\nclass TestGaussianSharpend(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_value(self, argments, image, expected_data):\n result = GaussianSharpend(**argments)(image)\n np.testing.assert_allclose(result[\"img\"], expected_data, rtol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_smooth.py_unittest_": {"__data__": 
{"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_smooth.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_gaussian_smooth.py", "file_name": "test_gaussian_smooth.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 66, "span_ids": ["impl:7", "impl:5", "docstring", "TestGaussianSmooth", "TestGaussianSmooth.test_value"], "tokens": 750}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import GaussianSmooth\n\nTEST_CASE_1 = [\n {\"sigma\": 1.5},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [\n [0.59167546, 0.69312394, 0.59167546],\n [0.7956997, 0.93213004, 0.7956997],\n [0.7668002, 0.8982755, 0.7668002],\n ],\n [[1.6105323, 1.8866735, 1.6105323], [1.9892492, 2.3303251, 1.9892492], [1.7856569, 2.091825, 1.7856569]],\n ]\n ),\n]\n\nTEST_CASE_2 = [\n {\"sigma\": 0.5},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [[0.8424794, 0.99864554, 0.8424794], [1.678146, 1.9892154, 1.678146], [1.9889624, 2.3576462, 1.9889624]],\n [[2.966061, 3.5158648, 2.966061], [4.1953645, 4.973038, 4.1953645], [4.112544, 4.8748655, 4.1125436]],\n ]\n ),\n]\n\nTEST_CASE_3 = [\n {\"sigma\": [1.5, 0.5]},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [[0.8542037, 1.0125432, 0.8542037], [1.1487541, 1.3616928, 1.1487541], [1.1070318, 1.3122368, 1.1070318]],\n [[2.3251305, 2.756128, 2.3251305], [2.8718853, 3.4042323, 2.8718853], [2.5779586, 3.0558217, 2.5779586]],\n ]\n ),\n]\n\n\nclass TestGaussianSmooth(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_value(self, argments, image, expected_data):\n result = GaussianSmooth(**argments)(image)\n np.testing.assert_allclose(result, expected_data, rtol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_smoothd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_smoothd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_gaussian_smoothd.py", "file_name": "test_gaussian_smoothd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 66, "span_ids": ["TestGaussianSmoothd", "impl:7", "impl:5", "TestGaussianSmoothd.test_value", "docstring"], "tokens": 785}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport 
numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import GaussianSmoothd\n\nTEST_CASE_1 = [\n {\"keys\": \"img\", \"sigma\": 1.5},\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [\n [0.59167546, 0.69312394, 0.59167546],\n [0.7956997, 0.93213004, 0.7956997],\n [0.7668002, 0.8982755, 0.7668002],\n ],\n [[1.6105323, 1.8866735, 1.6105323], [1.9892492, 2.3303251, 1.9892492], [1.7856569, 2.091825, 1.7856569]],\n ]\n ),\n]\n\nTEST_CASE_2 = [\n {\"keys\": \"img\", \"sigma\": 0.5},\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [[0.8424794, 0.99864554, 0.8424794], [1.678146, 1.9892154, 1.678146], [1.9889624, 2.3576462, 1.9889624]],\n [[2.966061, 3.5158648, 2.966061], [4.1953645, 4.973038, 4.1953645], [4.112544, 4.8748655, 4.1125436]],\n ]\n ),\n]\n\nTEST_CASE_3 = [\n {\"keys\": \"img\", \"sigma\": [1.5, 0.5]},\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [[0.8542037, 1.0125432, 0.8542037], [1.1487541, 1.3616928, 1.1487541], [1.1070318, 1.3122368, 1.1070318]],\n [[2.3251305, 2.756128, 2.3251305], [2.8718853, 3.4042323, 2.8718853], [2.5779586, 3.0558217, 2.5779586]],\n ]\n ),\n]\n\n\nclass TestGaussianSmoothd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_value(self, argments, image, expected_data):\n result = GaussianSmoothd(**argments)(image)\n np.testing.assert_allclose(result[\"img\"], expected_data, rtol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_loader.py_TestHandlerCheckpointLoader.test_two_save_one_load_TestHandlerCheckpointLoader.test_two_save_one_load.with_tempfile_TemporaryDi.torch_testing_assert_allc": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_loader.py_TestHandlerCheckpointLoader.test_two_save_one_load_TestHandlerCheckpointLoader.test_two_save_one_load.with_tempfile_TemporaryDi.torch_testing_assert_allc", "embedding": null, "metadata": {"file_path": "tests/test_handler_checkpoint_loader.py", "file_name": "test_handler_checkpoint_loader.py", "file_type": "text/x-python", "category": "test", "start_line": 60, "end_line": 80, "span_ids": ["TestHandlerCheckpointLoader.test_two_save_one_load"], "tokens": 283}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestHandlerCheckpointLoader(unittest.TestCase):\n\n def test_two_save_one_load(self):\n logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n net1 = torch.nn.PReLU()\n optimizer = optim.SGD(net1.parameters(), lr=0.02)\n data1 = net1.state_dict()\n data1[\"weight\"] = torch.tensor([0.1])\n net1.load_state_dict(data1)\n net2 = torch.nn.PReLU()\n data2 = net2.state_dict()\n data2[\"weight\"] = torch.tensor([0.2])\n net2.load_state_dict(data2)\n 
with tempfile.TemporaryDirectory() as tempdir:\n engine = Engine(lambda e, b: None)\n save_dict = {\"net\": net1, \"opt\": optimizer}\n CheckpointSaver(save_dir=tempdir, save_dict=save_dict, save_final=True).attach(engine)\n engine.run([0] * 8, max_epochs=5)\n path = tempdir + \"/checkpoint_final_iteration=40.pt\"\n engine = Engine(lambda e, b: None)\n CheckpointLoader(load_path=path, load_dict={\"net\": net2}, strict=True).attach(engine)\n engine.run([0] * 8, max_epochs=1)\n torch.testing.assert_allclose(net2.state_dict()[\"weight\"], torch.tensor([0.1]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_saver.py_TestHandlerCheckpointSaver_TestHandlerCheckpointSaver.test_file.with_tempfile_TemporaryDi.for_filename_in_filenames.self_assertTrue_os_path_e": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_checkpoint_saver.py_TestHandlerCheckpointSaver_TestHandlerCheckpointSaver.test_file.with_tempfile_TemporaryDi.for_filename_in_filenames.self_assertTrue_os_path_e", "embedding": null, "metadata": {"file_path": "tests/test_handler_checkpoint_saver.py", "file_name": "test_handler_checkpoint_saver.py", "file_type": "text/x-python", "category": "test", "start_line": 108, "end_line": 173, "span_ids": ["TestHandlerCheckpointSaver", "TestHandlerCheckpointSaver.test_file"], "tokens": 388}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestHandlerCheckpointSaver(unittest.TestCase):\n @parameterized.expand(\n [\n TEST_CASE_1,\n TEST_CASE_2,\n TEST_CASE_3,\n TEST_CASE_4,\n TEST_CASE_5,\n TEST_CASE_6,\n TEST_CASE_7,\n TEST_CASE_8,\n ]\n )\n def test_file(\n self,\n save_final,\n final_filename,\n save_key_metric,\n key_metric_name,\n key_metric_n_saved,\n key_metric_filename,\n key_metric_save_state,\n key_metric_greater_or_equal,\n epoch_level,\n save_interval,\n n_saved,\n filenames,\n multi_devices=False,\n ):\n logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n data = [0] * 8\n\n # set up engine\n def _train_func(engine, batch):\n engine.state.metrics[\"val_loss\"] = engine.state.iteration\n\n engine = Engine(_train_func)\n\n # set up testing handler\n net = torch.nn.PReLU()\n if multi_devices:\n net = torch.nn.DataParallel(net)\n optimizer = optim.SGD(net.parameters(), lr=0.02)\n with tempfile.TemporaryDirectory() as tempdir:\n handler = CheckpointSaver(\n tempdir,\n {\"net\": net, \"opt\": optimizer},\n \"CheckpointSaver\",\n \"test\",\n save_final,\n final_filename,\n save_key_metric,\n key_metric_name,\n key_metric_n_saved,\n key_metric_filename,\n key_metric_save_state,\n key_metric_greater_or_equal,\n epoch_level,\n save_interval,\n n_saved,\n )\n handler.attach(engine)\n engine.run(data, max_epochs=2)\n engine.run(data, max_epochs=5)\n for filename in filenames:\n self.assertTrue(os.path.exists(os.path.join(tempdir, filename)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": 
"{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_classification_saver.py_csv_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_classification_saver.py_csv_", "embedding": null, "metadata": {"file_path": "tests/test_handler_classification_saver.py", "file_name": "test_handler_classification_saver.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 54, "span_ids": ["TestHandlerClassificationSaver", "TestHandlerClassificationSaver.test_saved_content", "impl", "docstring"], "tokens": 266}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import csv\nimport os\nimport tempfile\nimport unittest\n\nimport numpy as np\nimport torch\nfrom ignite.engine import Engine\n\nfrom monai.handlers import ClassificationSaver\n\n\nclass TestHandlerClassificationSaver(unittest.TestCase):\n def test_saved_content(self):\n with tempfile.TemporaryDirectory() as tempdir:\n\n # set up engine\n def _train_func(engine, batch):\n return torch.zeros(8)\n\n engine = Engine(_train_func)\n\n # set up testing handler\n saver = ClassificationSaver(output_dir=tempdir, filename=\"predictions.csv\")\n saver.attach(engine)\n\n data = [{\"filename_or_obj\": [\"testfile\" + str(i) for i in range(8)]}]\n engine.run(data, max_epochs=1)\n filepath = os.path.join(tempdir, \"predictions.csv\")\n self.assertTrue(os.path.exists(filepath))\n with open(filepath, \"r\") as f:\n reader = csv.reader(f)\n i = 0\n for row in reader:\n self.assertEqual(row[0], \"testfile\" + str(i))\n self.assertEqual(np.array(row[1:]).astype(np.float32), 0.0)\n i += 1\n self.assertEqual(i, 8)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_lr_scheduler.py_logging_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_lr_scheduler.py_logging_", "embedding": null, "metadata": {"file_path": "tests/test_handler_lr_scheduler.py", "file_name": "test_handler_lr_scheduler.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 66, "span_ids": ["TestHandlerLrSchedule", "TestHandlerLrSchedule.test_content", "impl", "docstring"], "tokens": 368}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import logging\nimport sys\nimport unittest\n\nimport numpy as np\nimport torch\nfrom ignite.engine import Engine, Events\n\nfrom monai.handlers import LrScheduleHandler\n\n\nclass TestHandlerLrSchedule(unittest.TestCase):\n def 
test_content(self):\n logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n data = [0] * 8\n\n # set up engine\n def _train_func(engine, batch):\n pass\n\n val_engine = Engine(_train_func)\n train_engine = Engine(_train_func)\n\n @train_engine.on(Events.EPOCH_COMPLETED)\n def run_validation(engine):\n val_engine.run(data)\n val_engine.state.metrics[\"val_loss\"] = 1\n\n # set up testing handler\n net = torch.nn.PReLU()\n\n def _reduce_lr_on_plateau():\n optimizer = torch.optim.SGD(net.parameters(), 0.1)\n lr_scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=1)\n handler = LrScheduleHandler(lr_scheduler, step_transform=lambda x: val_engine.state.metrics[\"val_loss\"])\n handler.attach(train_engine)\n return lr_scheduler\n\n def _reduce_on_step():\n optimizer = torch.optim.SGD(net.parameters(), 0.1)\n lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=2, gamma=0.1)\n handler = LrScheduleHandler(lr_scheduler)\n handler.attach(train_engine)\n return lr_scheduler\n\n schedulers = _reduce_lr_on_plateau(), _reduce_on_step()\n\n train_engine.run(data, max_epochs=5)\n for scheduler in schedulers:\n np.testing.assert_allclose(scheduler._last_lr[0], 0.001)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_segmentation_saver.py_os_TestHandlerSegmentationSaver.test_saved_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_segmentation_saver.py_os_TestHandlerSegmentationSaver.test_saved_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e", "embedding": null, "metadata": {"file_path": "tests/test_handler_segmentation_saver.py", "file_name": "test_handler_segmentation_saver.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 52, "span_ids": ["TestHandlerSegmentationSaver.test_saved_content", "TestHandlerSegmentationSaver", "docstring"], "tokens": 309}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport tempfile\nimport unittest\n\nimport numpy as np\nimport torch\nfrom ignite.engine import Engine\nfrom parameterized import parameterized\n\nfrom monai.handlers import SegmentationSaver\n\nTEST_CASE_0 = [\".nii.gz\"]\n\nTEST_CASE_1 = [\".png\"]\n\n\nclass TestHandlerSegmentationSaver(unittest.TestCase):\n @parameterized.expand([TEST_CASE_0, TEST_CASE_1])\n def test_saved_content(self, output_ext):\n with tempfile.TemporaryDirectory() as tempdir:\n\n # set up engine\n def _train_func(engine, batch):\n return torch.randint(0, 255, (8, 1, 2, 2)).float()\n\n engine = Engine(_train_func)\n\n # set up testing handler\n saver = SegmentationSaver(output_dir=tempdir, output_postfix=\"seg\", output_ext=output_ext, scale=255)\n saver.attach(engine)\n\n data = [\n {\n \"filename_or_obj\": [\"testfile\" + str(i) + \".nii.gz\" for i in range(8)],\n 
\"patch_index\": list(range(8)),\n }\n ]\n engine.run(data, max_epochs=1)\n for i in range(8):\n filepath = os.path.join(\"testfile\" + str(i), \"testfile\" + str(i) + \"_seg\" + f\"_{i}\" + output_ext)\n self.assertTrue(os.path.exists(os.path.join(tempdir, filepath)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_stats.py_logging_TestHandlerStats.test_metrics_print.for_idx_line_in_enumerat.if_grep_match_line_.if_idx_in_5_10_.self_assertTrue_has_key_w": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_stats.py_logging_TestHandlerStats.test_metrics_print.for_idx_line_in_enumerat.if_grep_match_line_.if_idx_in_5_10_.self_assertTrue_has_key_w", "embedding": null, "metadata": {"file_path": "tests/test_handler_stats.py", "file_name": "test_handler_stats.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 59, "span_ids": ["TestHandlerStats.test_metrics_print", "TestHandlerStats", "docstring"], "tokens": 315}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import logging\nimport os\nimport re\nimport tempfile\nimport unittest\nfrom io import StringIO\n\nimport torch\nfrom ignite.engine import Engine, Events\n\nfrom monai.handlers import StatsHandler\n\n\nclass TestHandlerStats(unittest.TestCase):\n def test_metrics_print(self):\n log_stream = StringIO()\n log_handler = logging.StreamHandler(log_stream)\n log_handler.setLevel(logging.INFO)\n key_to_handler = \"test_logging\"\n key_to_print = \"testing_metric\"\n\n # set up engine\n def _train_func(engine, batch):\n return torch.tensor(0.0)\n\n engine = Engine(_train_func)\n\n # set up dummy metric\n @engine.on(Events.EPOCH_COMPLETED)\n def _update_metric(engine):\n current_metric = engine.state.metrics.get(key_to_print, 0.1)\n engine.state.metrics[key_to_print] = current_metric + 0.1\n\n # set up testing handler\n stats_handler = StatsHandler(name=key_to_handler, logger_handler=log_handler)\n stats_handler.attach(engine)\n\n engine.run(range(3), max_epochs=2)\n\n # check logging output\n output_str = log_stream.getvalue()\n log_handler.close()\n grep = re.compile(f\".*{key_to_handler}.*\")\n has_key_word = re.compile(f\".*{key_to_print}.*\")\n for idx, line in enumerate(output_str.split(\"\\n\")):\n if grep.match(line):\n if idx in [5, 10]:\n self.assertTrue(has_key_word.match(line))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_tb_stats.py_glob_TestHandlerTBStats.test_metrics_print.with_tempfile_TemporaryDi.self_assertTrue_len_glob_": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_tb_stats.py_glob_TestHandlerTBStats.test_metrics_print.with_tempfile_TemporaryDi.self_assertTrue_len_glob_", "embedding": null, "metadata": {"file_path": "tests/test_handler_tb_stats.py", "file_name": "test_handler_tb_stats.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 44, "span_ids": ["TestHandlerTBStats.test_metrics_print", "TestHandlerTBStats", "docstring"], "tokens": 213}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import glob\nimport tempfile\nimport unittest\n\nfrom ignite.engine import Engine, Events\nfrom torch.utils.tensorboard import SummaryWriter\n\nfrom monai.handlers import TensorBoardStatsHandler\n\n\nclass TestHandlerTBStats(unittest.TestCase):\n def test_metrics_print(self):\n with tempfile.TemporaryDirectory() as tempdir:\n\n # set up engine\n def _train_func(engine, batch):\n return batch + 1.0\n\n engine = Engine(_train_func)\n\n # set up dummy metric\n @engine.on(Events.EPOCH_COMPLETED)\n def _update_metric(engine):\n current_metric = engine.state.metrics.get(\"acc\", 0.1)\n engine.state.metrics[\"acc\"] = current_metric + 0.1\n\n # set up testing handler\n stats_handler = TensorBoardStatsHandler(log_dir=tempdir)\n stats_handler.attach(engine)\n engine.run(range(3), max_epochs=2)\n stats_handler.close()\n # check logging output\n self.assertTrue(len(glob.glob(tempdir)) > 0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_img2tensorboard.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_img2tensorboard.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_img2tensorboard.py", "file_name": "test_img2tensorboard.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 52, "span_ids": ["TestImg2Tensorboard.test_write_gray", "TestImg2Tensorboard", "impl", "docstring"], "tokens": 280}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport tensorboard\nimport torch\n\nfrom monai.visualize import make_animated_gif_summary\n\n\nclass TestImg2Tensorboard(unittest.TestCase):\n def test_write_gray(self):\n nparr = np.ones(shape=(1, 32, 32, 32), dtype=np.float32)\n summary_object_np = make_animated_gif_summary(\n tag=\"test_summary_nparr.png\",\n image=nparr,\n max_out=1,\n animation_axes=(3,),\n image_axes=(1, 2),\n scale_factor=253.0,\n )\n assert isinstance(\n summary_object_np, tensorboard.compat.proto.summary_pb2.Summary\n ), \"make_animated_gif_summary must return a tensorboard.summary object from numpy array\"\n\n tensorarr = 
torch.tensor(nparr)\n summary_object_tensor = make_animated_gif_summary(\n tag=\"test_summary_tensorarr.png\",\n image=tensorarr,\n max_out=1,\n animation_axes=(3,),\n image_axes=(1, 2),\n scale_factor=253.0,\n )\n assert isinstance(\n summary_object_tensor, tensorboard.compat.proto.summary_pb2.Summary\n ), \"make_animated_gif_summary must return a tensorboard.summary object from tensor input\"\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows_gan.py_run_training_test_run_training_test.return.trainer_state": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows_gan.py_run_training_test_run_training_test.return.trainer_state", "embedding": null, "metadata": {"file_path": "tests/test_integration_workflows_gan.py", "file_name": "test_integration_workflows_gan.py", "file_type": "text/x-python", "category": "test", "start_line": 36, "end_line": 126, "span_ids": ["run_training_test"], "tokens": 820}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def run_training_test(root_dir, device=\"cuda:0\"):\n real_images = sorted(glob(os.path.join(root_dir, \"img*.nii.gz\")))\n train_files = [{\"reals\": img} for img in zip(real_images)]\n\n # prepare real data\n train_transforms = Compose(\n [\n LoadImaged(keys=[\"reals\"]),\n AsChannelFirstd(keys=[\"reals\"]),\n ScaleIntensityd(keys=[\"reals\"]),\n RandFlipd(keys=[\"reals\"], prob=0.5),\n ToTensord(keys=[\"reals\"]),\n ]\n )\n train_ds = monai.data.CacheDataset(data=train_files, transform=train_transforms, cache_rate=0.5)\n train_loader = monai.data.DataLoader(train_ds, batch_size=2, shuffle=True, num_workers=4)\n\n learning_rate = 2e-4\n betas = (0.5, 0.999)\n real_label = 1\n fake_label = 0\n\n # create discriminator\n disc_net = Discriminator(\n in_shape=(1, 64, 64), channels=(8, 16, 32, 64, 1), strides=(2, 2, 2, 2, 1), num_res_units=1, kernel_size=5\n ).to(device)\n disc_net.apply(normal_init)\n disc_opt = torch.optim.Adam(disc_net.parameters(), learning_rate, betas=betas)\n disc_loss_criterion = torch.nn.BCELoss()\n\n def discriminator_loss(gen_images, real_images):\n real = real_images.new_full((real_images.shape[0], 1), real_label)\n gen = gen_images.new_full((gen_images.shape[0], 1), fake_label)\n realloss = disc_loss_criterion(disc_net(real_images), real)\n genloss = disc_loss_criterion(disc_net(gen_images.detach()), gen)\n return torch.div(torch.add(realloss, genloss), 2)\n\n # create generator\n latent_size = 64\n gen_net = Generator(\n latent_shape=latent_size, start_shape=(latent_size, 8, 8), channels=[32, 16, 8, 1], strides=[2, 2, 2, 1]\n )\n gen_net.apply(normal_init)\n gen_net.conv.add_module(\"activation\", torch.nn.Sigmoid())\n gen_net = gen_net.to(device)\n gen_opt = torch.optim.Adam(gen_net.parameters(), learning_rate, betas=betas)\n gen_loss_criterion = torch.nn.BCELoss()\n\n def generator_loss(gen_images):\n output = 
disc_net(gen_images)\n cats = output.new_full(output.shape, real_label)\n return gen_loss_criterion(output, cats)\n\n key_train_metric = None\n\n train_handlers = [\n StatsHandler(\n name=\"training_loss\",\n output_transform=lambda x: {Keys.GLOSS: x[Keys.GLOSS], Keys.DLOSS: x[Keys.DLOSS]},\n ),\n TensorBoardStatsHandler(\n log_dir=root_dir,\n tag_name=\"training_loss\",\n output_transform=lambda x: {Keys.GLOSS: x[Keys.GLOSS], Keys.DLOSS: x[Keys.DLOSS]},\n ),\n CheckpointSaver(\n save_dir=root_dir, save_dict={\"g_net\": gen_net, \"d_net\": disc_net}, save_interval=2, epoch_level=True\n ),\n ]\n\n disc_train_steps = 2\n num_epochs = 5\n\n trainer = GanTrainer(\n device,\n num_epochs,\n train_loader,\n gen_net,\n gen_opt,\n generator_loss,\n disc_net,\n disc_opt,\n discriminator_loss,\n d_train_steps=disc_train_steps,\n latent_shape=latent_size,\n key_train_metric=key_train_metric,\n train_handlers=train_handlers,\n )\n trainer.run()\n\n return trainer.state", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows_gan.py_IntegrationWorkflowsGAN_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_integration_workflows_gan.py_IntegrationWorkflowsGAN_", "embedding": null, "metadata": {"file_path": "tests/test_integration_workflows_gan.py", "file_name": "test_integration_workflows_gan.py", "file_type": "text/x-python", "category": "test", "start_line": 129, "end_line": 161, "span_ids": ["impl", "IntegrationWorkflowsGAN", "IntegrationWorkflowsGAN.tearDown", "IntegrationWorkflowsGAN.test_training", "IntegrationWorkflowsGAN.setUp"], "tokens": 266}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "@skip_if_quick\nclass IntegrationWorkflowsGAN(DistTestCase):\n def setUp(self):\n set_determinism(seed=0)\n\n self.data_dir = tempfile.mkdtemp()\n for i in range(40):\n im, _ = create_test_image_2d(64, 64, num_objs=3, rad_max=14, num_seg_classes=1, channel_dim=-1)\n n = nib.Nifti1Image(im, np.eye(4))\n nib.save(n, os.path.join(self.data_dir, f\"img{i:d}.nii.gz\"))\n\n self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu:0\")\n monai.config.print_config()\n logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n\n def tearDown(self):\n set_determinism(seed=None)\n shutil.rmtree(self.data_dir)\n\n @TimedCall(seconds=200, daemon=False)\n def test_training(self):\n torch.manual_seed(0)\n\n finish_state = run_training_test(self.data_dir, device=self.device)\n\n # assert GAN training finished\n self.assertEqual(finish_state.iteration, 100)\n self.assertEqual(finish_state.epoch, 5)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_list_to_dict.py_unittest_": 
{"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_list_to_dict.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_list_to_dict.py", "file_name": "test_list_to_dict.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 53, "span_ids": ["impl:11", "TestListToDict.test_value_shape", "TestListToDict", "docstring"], "tokens": 398}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nfrom parameterized import parameterized\n\nfrom monai.utils import list_to_dict\n\nTEST_CASE_1 = [\n [\"a=1\", \"b=2\", \"c=3\", \"d=4\"],\n {\"a\": 1, \"b\": 2, \"c\": 3, \"d\": 4},\n]\n\nTEST_CASE_2 = [\n [\"a=a\", \"b=b\", \"c=c\", \"d=d\"],\n {\"a\": \"a\", \"b\": \"b\", \"c\": \"c\", \"d\": \"d\"},\n]\n\nTEST_CASE_3 = [\n [\"a=0.1\", \"b=0.2\", \"c=0.3\", \"d=0.4\"],\n {\"a\": 0.1, \"b\": 0.2, \"c\": 0.3, \"d\": 0.4},\n]\n\nTEST_CASE_4 = [\n [\"a=True\", \"b=TRUE\", \"c=false\", \"d=FALSE\"],\n {\"a\": True, \"b\": True, \"c\": False, \"d\": False},\n]\n\nTEST_CASE_5 = [\n [\"a='1'\", \"b=2 \", \" c = 3\", \"d='test'\", \"'e'=0\", \"f\", \"g=None\"],\n {\"a\": 1, \"b\": 2, \"c\": 3, \"d\": \"test\", \"e\": 0, \"f\": None, \"g\": None},\n]\n\n\nclass TestListToDict(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5])\n def test_value_shape(self, input, output):\n result = list_to_dict(input)\n self.assertDictEqual(result, output)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lltm.py_unittest_TEST_CASE_1._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lltm.py_unittest_TEST_CASE_1._", "embedding": null, "metadata": {"file_path": "tests/test_lltm.py", "file_name": "test_lltm.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 24, "span_ids": ["docstring"], "tokens": 160}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks.layers import LLTM\nfrom tests.utils import SkipIfNoModule\n\nTEST_CASE_1 = [\n {\"input_features\": 32, \"state_size\": 2},\n torch.tensor([[-0.1622, 0.1663], [0.5465, 0.0459], [-0.1436, 0.6171], [0.3632, -0.0111]]),\n torch.tensor([[-1.3773, 0.3348], [0.8353, 1.3064], [-0.2179, 4.1739], [1.3045, -0.1444]]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lltm.py_TestLLTM_TestLLTM.test_value.None_3": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lltm.py_TestLLTM_TestLLTM.test_value.None_3", "embedding": null, "metadata": {"file_path": "tests/test_lltm.py", "file_name": "test_lltm.py", "file_type": "text/x-python", "category": "test", "start_line": 27, "end_line": 39, "span_ids": ["TestLLTM", "TestLLTM.test_value"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLLTM(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1])\n @SkipIfNoModule(\"monai._C\")\n def test_value(self, input_param, expected_h, expected_c):\n torch.manual_seed(0)\n x = torch.randn(4, 32)\n h = torch.randn(4, 2)\n c = torch.randn(4, 2)\n new_h, new_c = LLTM(**input_param)(x, (h, c))\n (new_h.sum() + new_c.sum()).backward()\n\n torch.testing.assert_allclose(new_h, expected_h, rtol=0.0001, atol=1e-04)\n torch.testing.assert_allclose(new_c, expected_c, rtol=0.0001, atol=1e-04)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lltm.py_TestLLTM.test_value_cuda_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_lltm.py_TestLLTM.test_value_cuda_", "embedding": null, "metadata": {"file_path": "tests/test_lltm.py", "file_name": "test_lltm.py", "file_type": "text/x-python", "category": "test", "start_line": 41, "end_line": 59, "span_ids": ["impl:3", "TestLLTM.test_value_cuda"], "tokens": 233}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLLTM(unittest.TestCase):\n\n @parameterized.expand([TEST_CASE_1])\n @SkipIfNoModule(\"monai._C\")\n def test_value_cuda(self, input_param, expected_h, expected_c):\n device = torch.device(\"cuda:0\") if torch.cuda.is_available() else torch.device(\"cpu:0\")\n torch.manual_seed(0)\n x = torch.randn(4, 32).to(device)\n h = torch.randn(4, 2).to(device)\n c = torch.randn(4, 2).to(device)\n lltm = LLTM(**input_param).to(device)\n new_h, new_c = lltm(x, (h, c))\n (new_h.sum() + new_c.sum()).backward()\n\n torch.testing.assert_allclose(new_h, expected_h.to(device), rtol=0.0001, atol=1e-04)\n torch.testing.assert_allclose(new_c, expected_c.to(device), rtol=0.0001, atol=1e-04)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mednistdataset.py_os_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_mednistdataset.py_os_", "embedding": null, "metadata": {"file_path": "tests/test_mednistdataset.py", "file_name": "test_mednistdataset.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 74, "span_ids": ["TestMedNISTDataset", "impl:3", "TestMedNISTDataset.test_values", "docstring"], "tokens": 574}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport shutil\nimport unittest\nfrom urllib.error import ContentTooShortError, HTTPError\n\nfrom monai.apps import MedNISTDataset\nfrom monai.transforms import AddChanneld, Compose, LoadImaged, ScaleIntensityd, ToTensord\nfrom tests.utils import skip_if_quick\n\nMEDNIST_FULL_DATASET_LENGTH = 58954\n\n\nclass TestMedNISTDataset(unittest.TestCase):\n @skip_if_quick\n def test_values(self):\n testing_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), \"testing_data\")\n transform = Compose(\n [\n LoadImaged(keys=\"image\"),\n AddChanneld(keys=\"image\"),\n ScaleIntensityd(keys=\"image\"),\n ToTensord(keys=[\"image\", \"label\"]),\n ]\n )\n\n def _test_dataset(dataset):\n self.assertEqual(len(dataset), int(MEDNIST_FULL_DATASET_LENGTH * dataset.test_frac))\n self.assertTrue(\"image\" in dataset[0])\n self.assertTrue(\"label\" in dataset[0])\n self.assertTrue(\"image_meta_dict\" in dataset[0])\n self.assertTupleEqual(dataset[0][\"image\"].shape, (1, 64, 64))\n\n try: # will start downloading if testing_dir doesn't have the MedNIST files\n data = MedNISTDataset(root_dir=testing_dir, transform=transform, section=\"test\", download=True)\n except (ContentTooShortError, HTTPError, RuntimeError) as e:\n print(str(e))\n if isinstance(e, RuntimeError):\n # FIXME: skip MD5 check as current downloading method may fail\n self.assertTrue(str(e).startswith(\"md5 check\"))\n return # skipping this test due the network connection errors\n\n _test_dataset(data)\n\n # testing from\n data = MedNISTDataset(root_dir=testing_dir, transform=transform, section=\"test\", download=False)\n data.get_num_classes()\n _test_dataset(data)\n data = MedNISTDataset(root_dir=testing_dir, section=\"test\", download=False)\n self.assertTupleEqual(data[0][\"image\"].shape, (64, 64))\n # test same dataset length with different random seed\n data = MedNISTDataset(root_dir=testing_dir, transform=transform, section=\"test\", download=False, seed=42)\n _test_dataset(data)\n shutil.rmtree(os.path.join(testing_dir, \"MedNIST\"))\n try:\n data = MedNISTDataset(root_dir=testing_dir, transform=transform, section=\"test\", download=False)\n except RuntimeError as e:\n print(str(e))\n self.assertTrue(str(e).startswith(\"Cannot find dataset directory\"))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_write_2d_TestNiftiLoadRead.test_write_2d.with_tempfile_TemporaryDi.None_5": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_write_2d_TestNiftiLoadRead.test_write_2d.with_tempfile_TemporaryDi.None_5", "embedding": null, "metadata": {"file_path": "tests/test_nifti_rw.py", "file_name": "test_nifti_rw.py", "file_type": "text/x-python", "category": "test", "start_line": 108, "end_line": 122, "span_ids": ["TestNiftiLoadRead.test_write_2d"], "tokens": 290}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNiftiLoadRead(unittest.TestCase):\n\n def test_write_2d(self):\n with tempfile.TemporaryDirectory() as out_dir:\n image_name = os.path.join(out_dir, \"test.nii.gz\")\n img = np.arange(6).reshape((2, 3))\n write_nifti(img, image_name, affine=np.diag([1]), target_affine=np.diag([1.4]))\n out = nib.load(image_name)\n np.testing.assert_allclose(out.get_fdata(), [[0, 1, 2], [3.0, 4, 5]])\n np.testing.assert_allclose(out.affine, np.diag([1.4, 1, 1, 1]))\n\n image_name = os.path.join(out_dir, \"test1.nii.gz\")\n img = np.arange(5).reshape((1, 5))\n write_nifti(img, image_name, affine=np.diag([1, 1, 1, 3, 3]), target_affine=np.diag([1.4, 2.0, 1, 3, 5]))\n out = nib.load(image_name)\n np.testing.assert_allclose(out.get_fdata(), [[0, 2, 4]])\n np.testing.assert_allclose(out.affine, np.diag([1.4, 2, 1, 1]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_write_3d_TestNiftiLoadRead.test_write_3d.with_tempfile_TemporaryDi.None_5": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_write_3d_TestNiftiLoadRead.test_write_3d.with_tempfile_TemporaryDi.None_5", "embedding": null, "metadata": {"file_path": "tests/test_nifti_rw.py", "file_name": "test_nifti_rw.py", "file_type": "text/x-python", "category": "test", "start_line": 124, "end_line": 138, "span_ids": ["TestNiftiLoadRead.test_write_3d"], "tokens": 296}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNiftiLoadRead(unittest.TestCase):\n\n def test_write_3d(self):\n with tempfile.TemporaryDirectory() as out_dir:\n image_name = os.path.join(out_dir, \"test.nii.gz\")\n img = np.arange(6).reshape((1, 2, 3))\n write_nifti(img, image_name, affine=np.diag([1]), target_affine=np.diag([1.4]))\n out = nib.load(image_name)\n np.testing.assert_allclose(out.get_fdata(), [[[0, 1, 2], [3, 4, 5]]])\n np.testing.assert_allclose(out.affine, np.diag([1.4, 1, 1, 
1]))\n\n image_name = os.path.join(out_dir, \"test1.nii.gz\")\n img = np.arange(5).reshape((1, 1, 5))\n write_nifti(img, image_name, affine=np.diag([1, 1, 1, 3, 3]), target_affine=np.diag([1.4, 2.0, 2, 3, 5]))\n out = nib.load(image_name)\n np.testing.assert_allclose(out.get_fdata(), [[[0, 2, 4]]])\n np.testing.assert_allclose(out.affine, np.diag([1.4, 2, 2, 1]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_write_4d_TestNiftiLoadRead.test_write_4d.with_tempfile_TemporaryDi.None_5": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_rw.py_TestNiftiLoadRead.test_write_4d_TestNiftiLoadRead.test_write_4d.with_tempfile_TemporaryDi.None_5", "embedding": null, "metadata": {"file_path": "tests/test_nifti_rw.py", "file_name": "test_nifti_rw.py", "file_type": "text/x-python", "category": "test", "start_line": 140, "end_line": 154, "span_ids": ["TestNiftiLoadRead.test_write_4d"], "tokens": 315}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNiftiLoadRead(unittest.TestCase):\n\n def test_write_4d(self):\n with tempfile.TemporaryDirectory() as out_dir:\n image_name = os.path.join(out_dir, \"test.nii.gz\")\n img = np.arange(6).reshape((1, 1, 3, 2))\n write_nifti(img, image_name, affine=np.diag([1.4, 1]), target_affine=np.diag([1, 1.4, 1]))\n out = nib.load(image_name)\n np.testing.assert_allclose(out.get_fdata(), [[[[0, 1], [2, 3], [4, 5]]]])\n np.testing.assert_allclose(out.affine, np.diag([1, 1.4, 1, 1]))\n\n image_name = os.path.join(out_dir, \"test1.nii.gz\")\n img = np.arange(5).reshape((1, 1, 5, 1))\n write_nifti(img, image_name, affine=np.diag([1, 1, 1, 3, 3]), target_affine=np.diag([1.4, 2.0, 2, 3, 5]))\n out = nib.load(image_name)\n np.testing.assert_allclose(out.get_fdata(), [[[[0], [2], [4]]]])\n np.testing.assert_allclose(out.affine, np.diag([1.4, 2, 2, 1]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_saver.py_os_TestNiftiSaver.test_saved_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_saver.py_os_TestNiftiSaver.test_saved_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e", "embedding": null, "metadata": {"file_path": "tests/test_nifti_saver.py", "file_name": "test_nifti_saver.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 33, "span_ids": ["TestNiftiSaver.test_saved_content", "TestNiftiSaver", "docstring"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport tempfile\nimport unittest\n\nimport numpy as np\nimport torch\n\nfrom monai.data import NiftiSaver\nfrom monai.transforms import LoadImage\n\n\nclass TestNiftiSaver(unittest.TestCase):\n def test_saved_content(self):\n with tempfile.TemporaryDirectory() as tempdir:\n\n saver = NiftiSaver(output_dir=tempdir, output_postfix=\"seg\", output_ext=\".nii.gz\")\n\n meta_data = {\"filename_or_obj\": [\"testfile\" + str(i) + \".nii\" for i in range(8)]}\n saver.save_batch(torch.zeros(8, 1, 2, 2), meta_data)\n for i in range(8):\n filepath = os.path.join(\"testfile\" + str(i), \"testfile\" + str(i) + \"_seg.nii.gz\")\n self.assertTrue(os.path.exists(os.path.join(tempdir, filepath)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_saver.py_TestNiftiSaver.test_saved_resize_content_TestNiftiSaver.test_saved_resize_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_nifti_saver.py_TestNiftiSaver.test_saved_resize_content_TestNiftiSaver.test_saved_resize_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e", "embedding": null, "metadata": {"file_path": "tests/test_nifti_saver.py", "file_name": "test_nifti_saver.py", "file_type": "text/x-python", "category": "test", "start_line": 34, "end_line": 52, "span_ids": ["TestNiftiSaver.test_saved_resize_content"], "tokens": 222}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNiftiSaver(unittest.TestCase):\n\n def test_saved_resize_content(self):\n with tempfile.TemporaryDirectory() as tempdir:\n\n saver = NiftiSaver(\n output_dir=tempdir,\n output_postfix=\"seg\",\n output_ext=\".nii.gz\",\n dtype=np.float32,\n )\n\n meta_data = {\n \"filename_or_obj\": [\"testfile\" + str(i) + \".nii\" for i in range(8)],\n \"affine\": [np.diag(np.ones(4)) * 5] * 8,\n \"original_affine\": [np.diag(np.ones(4)) * 1.0] * 8,\n }\n saver.save_batch(torch.randint(0, 255, (8, 8, 2, 2)), meta_data)\n for i in range(8):\n filepath = os.path.join(\"testfile\" + str(i), \"testfile\" + str(i) + \"_seg.nii.gz\")\n self.assertTrue(os.path.exists(os.path.join(tempdir, filepath)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_persistentdataset.py_os_TEST_CASE_3._None_128_128_128_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_persistentdataset.py_os_TEST_CASE_3._None_128_128_128_", "embedding": 
null, "metadata": {"file_path": "tests/test_persistentdataset.py", "file_name": "test_persistentdataset.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 41, "span_ids": ["docstring"], "tokens": 221}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport tempfile\nimport unittest\n\nimport nibabel as nib\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.data import PersistentDataset, json_hashing\nfrom monai.transforms import Compose, LoadImaged, SimulateDelayd, Transform\n\nTEST_CASE_1 = [\n Compose(\n [\n LoadImaged(keys=[\"image\", \"label\", \"extra\"]),\n SimulateDelayd(keys=[\"image\", \"label\", \"extra\"], delay_time=[1e-7, 1e-6, 1e-5]),\n ]\n ),\n (128, 128, 128),\n]\n\nTEST_CASE_2 = [\n [\n LoadImaged(keys=[\"image\", \"label\", \"extra\"]),\n SimulateDelayd(keys=[\"image\", \"label\", \"extra\"], delay_time=[1e-7, 1e-6, 1e-5]),\n ],\n (128, 128, 128),\n]\n\nTEST_CASE_3 = [None, (128, 128, 128)]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_png_saver.py_os_TestPNGSaver.test_saved_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_png_saver.py_os_TestPNGSaver.test_saved_content.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e", "embedding": null, "metadata": {"file_path": "tests/test_png_saver.py", "file_name": "test_png_saver.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 31, "span_ids": ["TestPNGSaver.test_saved_content", "TestPNGSaver", "docstring"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport tempfile\nimport unittest\n\nimport torch\n\nfrom monai.data import PNGSaver\n\n\nclass TestPNGSaver(unittest.TestCase):\n def test_saved_content(self):\n with tempfile.TemporaryDirectory() as tempdir:\n\n saver = PNGSaver(output_dir=tempdir, output_postfix=\"seg\", output_ext=\".png\", scale=255)\n\n meta_data = {\"filename_or_obj\": [\"testfile\" + str(i) + \".jpg\" for i in range(8)]}\n saver.save_batch(torch.randint(1, 200, (8, 1, 2, 2)), meta_data)\n for i in range(8):\n filepath = os.path.join(\"testfile\" + str(i), \"testfile\" + str(i) + \"_seg.png\")\n self.assertTrue(os.path.exists(os.path.join(tempdir, filepath)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_png_saver.py_TestPNGSaver.test_saved_content_three_channel_TestPNGSaver.test_saved_content_three_channel.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_png_saver.py_TestPNGSaver.test_saved_content_three_channel_TestPNGSaver.test_saved_content_three_channel.with_tempfile_TemporaryDi.for_i_in_range_8_.self_assertTrue_os_path_e", "embedding": null, "metadata": {"file_path": "tests/test_png_saver.py", "file_name": "test_png_saver.py", "file_type": "text/x-python", "category": "test", "start_line": 33, "end_line": 42, "span_ids": ["TestPNGSaver.test_saved_content_three_channel"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPNGSaver(unittest.TestCase):\n\n def test_saved_content_three_channel(self):\n with tempfile.TemporaryDirectory() as tempdir:\n\n saver = PNGSaver(output_dir=tempdir, output_postfix=\"seg\", output_ext=\".png\", scale=255)\n\n meta_data = {\"filename_or_obj\": [\"testfile\" + str(i) + \".jpg\" for i in range(8)]}\n saver.save_batch(torch.randint(1, 200, (8, 3, 2, 2)), meta_data)\n for i in range(8):\n filepath = os.path.join(\"testfile\" + str(i), \"testfile\" + str(i) + \"_seg.png\")\n self.assertTrue(os.path.exists(os.path.join(tempdir, filepath)))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_sharpen.py_unittest_TEST_CASE_2._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_sharpen.py_unittest_TEST_CASE_2._", "embedding": null, "metadata": {"file_path": "tests/test_rand_gaussian_sharpen.py", "file_name": "test_rand_gaussian_sharpen.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 47, "span_ids": ["docstring"], "tokens": 506}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandGaussianSharpen\n\nTEST_CASE_1 = [\n {\"prob\": 1.0},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [[5.2919216, 5.5854445, 5.29192], [11.3982, 12.62332, 11.398202], [14.870525, 17.323769, 14.870527]],\n [[20.413757, 22.767355, 20.413757], [28.495504, 31.558315, 28.495499], [29.99236, 34.505676, 29.992361]],\n ]\n ),\n]\n\nTEST_CASE_2 = [\n {\n \"sigma1_x\": (0.5, 0.75),\n \"sigma1_y\": (0.5, 0.75),\n \"sigma1_z\": (0.5, 0.75),\n \"sigma2_x\": 0.4,\n \"sigma2_y\": 0.4,\n \"sigma2_z\": 0.4,\n \"prob\": 1.0,\n 
},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [[4.1071496, 3.597953, 4.1071477], [10.062014, 9.825114, 10.0620165], [14.698058, 15.818766, 14.698058]],\n [[18.211048, 18.16049, 18.211048], [25.155039, 24.56279, 25.155039], [28.801964, 30.381308, 28.801964]],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_sharpen.py_TEST_CASE_3_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_sharpen.py_TEST_CASE_3_", "embedding": null, "metadata": {"file_path": "tests/test_rand_gaussian_sharpen.py", "file_name": "test_rand_gaussian_sharpen.py", "file_type": "text/x-python", "category": "test", "start_line": 49, "end_line": 100, "span_ids": ["impl:9", "TestRandGaussianSharpen", "impl:5", "TestRandGaussianSharpen.test_value"], "tokens": 710}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_3 = [\n {\n \"sigma1_x\": (0.5, 0.75),\n \"sigma1_y\": (0.5, 0.75),\n \"sigma1_z\": (0.5, 0.75),\n \"sigma2_x\": (0.5, 0.75),\n \"sigma2_y\": (0.5, 0.75),\n \"sigma2_z\": (0.5, 0.75),\n \"prob\": 1.0,\n },\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [[4.81077, 4.4237204, 4.81077], [12.061236, 12.298177, 12.061236], [17.362553, 19.201174, 17.362553]],\n [[21.440754, 22.142393, 21.440754], [30.15308, 30.745445, 30.153086], [33.99255, 36.919838, 33.99255]],\n ]\n ),\n]\n\nTEST_CASE_4 = [\n {\n \"sigma1_x\": (0.5, 0.75),\n \"sigma1_y\": (0.5, 0.75),\n \"sigma1_z\": (0.5, 0.75),\n \"sigma2_x\": (0.5, 0.75),\n \"sigma2_y\": (0.5, 0.75),\n \"sigma2_z\": (0.5, 0.75),\n \"approx\": \"scalespace\",\n \"prob\": 1.0,\n },\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [[4.430213, 3.2278745, 4.4302144], [10.325399, 8.507457, 10.325399], [17.494898, 16.5609, 17.494894]],\n [[20.87405, 18.06946, 20.87405], [25.813503, 21.268656, 25.8135], [33.93874, 31.402481, 33.938725]],\n ]\n ),\n]\n\n\nclass TestRandGaussianSharpen(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4])\n def test_value(self, argments, image, expected_data):\n converter = RandGaussianSharpen(**argments)\n converter.set_random_state(seed=0)\n result = converter(image)\n np.testing.assert_allclose(result, expected_data, rtol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_sharpend.py_unittest_TEST_CASE_2._": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_sharpend.py_unittest_TEST_CASE_2._", "embedding": null, "metadata": {"file_path": "tests/test_rand_gaussian_sharpend.py", "file_name": "test_rand_gaussian_sharpend.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 48, "span_ids": ["docstring"], "tokens": 527}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandGaussianSharpend\n\nTEST_CASE_1 = [\n {\"keys\": \"img\", \"prob\": 1.0},\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [[5.2919216, 5.5854445, 5.29192], [11.3982, 12.62332, 11.398202], [14.870525, 17.323769, 14.870527]],\n [[20.413757, 22.767355, 20.413757], [28.495504, 31.558315, 28.495499], [29.99236, 34.505676, 29.992361]],\n ]\n ),\n]\n\nTEST_CASE_2 = [\n {\n \"keys\": \"img\",\n \"sigma1_x\": (0.5, 0.75),\n \"sigma1_y\": (0.5, 0.75),\n \"sigma1_z\": (0.5, 0.75),\n \"sigma2_x\": 0.4,\n \"sigma2_y\": 0.4,\n \"sigma2_z\": 0.4,\n \"prob\": 1.0,\n },\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [[4.1071496, 3.597953, 4.1071477], [10.062014, 9.825114, 10.0620165], [14.698058, 15.818766, 14.698058]],\n [[18.211048, 18.16049, 18.211048], [25.155039, 24.56279, 25.155039], [28.801964, 30.381308, 28.801964]],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_sharpend.py_TEST_CASE_3_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_sharpend.py_TEST_CASE_3_", "embedding": null, "metadata": {"file_path": "tests/test_rand_gaussian_sharpend.py", "file_name": "test_rand_gaussian_sharpend.py", "file_type": "text/x-python", "category": "test", "start_line": 50, "end_line": 103, "span_ids": ["impl:9", "TestRandGaussianSharpend", "TestRandGaussianSharpend.test_value", "impl:5"], "tokens": 734}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_3 = [\n {\n \"keys\": \"img\",\n \"sigma1_x\": (0.5, 0.75),\n \"sigma1_y\": (0.5, 0.75),\n \"sigma1_z\": (0.5, 0.75),\n \"sigma2_x\": (0.5, 0.75),\n \"sigma2_y\": (0.5, 0.75),\n \"sigma2_z\": (0.5, 0.75),\n \"prob\": 1.0,\n },\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [[4.81077, 4.4237204, 4.81077], [12.061236, 12.298177, 12.061236], [17.362553, 19.201174, 17.362553]],\n [[21.440754, 22.142393, 21.440754], [30.15308, 30.745445, 30.153086], [33.99255, 36.919838, 
33.99255]],\n ]\n ),\n]\n\nTEST_CASE_4 = [\n {\n \"keys\": \"img\",\n \"sigma1_x\": (0.5, 0.75),\n \"sigma1_y\": (0.5, 0.75),\n \"sigma1_z\": (0.5, 0.75),\n \"sigma2_x\": (0.5, 0.75),\n \"sigma2_y\": (0.5, 0.75),\n \"sigma2_z\": (0.5, 0.75),\n \"approx\": \"scalespace\",\n \"prob\": 1.0,\n },\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [[4.430213, 3.2278745, 4.4302144], [10.325399, 8.507457, 10.325399], [17.494898, 16.5609, 17.494894]],\n [[20.87405, 18.06946, 20.87405], [25.813503, 21.268656, 25.8135], [33.93874, 31.402481, 33.938725]],\n ]\n ),\n]\n\n\nclass TestRandGaussianSharpend(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4])\n def test_value(self, argments, image, expected_data):\n converter = RandGaussianSharpend(**argments)\n converter.set_random_state(seed=0)\n result = converter(image)\n np.testing.assert_allclose(result[\"img\"], expected_data, rtol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_smooth.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_smooth.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rand_gaussian_smooth.py", "file_name": "test_rand_gaussian_smooth.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 64, "span_ids": ["TestRandGaussianSmooth.test_value", "TestRandGaussianSmooth", "impl:7", "impl:5", "docstring"], "tokens": 840}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandGaussianSmooth\n\nTEST_CASE_1 = [\n {\"sigma_x\": (0.5, 1.5), \"prob\": 1.0},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [[0.71806467, 0.9074683, 0.71806467], [1.0718315, 1.3545481, 1.0718315], [1.0337002, 1.306359, 1.0337002]],\n [[2.0318885, 2.5678391, 2.0318885], [2.6795788, 3.3863702, 2.6795788], [2.3475242, 2.9667296, 2.3475242]],\n ]\n ),\n]\n\nTEST_CASE_2 = [\n {\"sigma_x\": (0.5, 1.5), \"sigma_y\": (0.5, 1.0), \"prob\": 1.0},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [[0.7686928, 0.9848021, 0.7686928], [1.1474025, 1.4699818, 1.1474024], [1.1065826, 1.4176859, 1.1065826]],\n [[2.1751494, 2.7866683, 2.1751497], [2.8685062, 3.6749542, 2.8685062], [2.5130394, 3.219552, 2.5130394]],\n ]\n ),\n]\n\nTEST_CASE_3 = [\n {\"sigma_x\": (0.5, 1.5), \"sigma_y\": (0.5, 1.0), \"approx\": \"scalespace\", \"prob\": 1.0},\n np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]]),\n np.array(\n [\n [[0.8128456, 0.96736777, 0.8128456], [1.2742369, 1.5164697, 1.2742369], [1.2800367, 1.5233722, 1.2800368]],\n [[2.3825073, 2.8354228, 2.3825073], [3.1855922, 3.7911744, 
3.1855922], [2.8496985, 3.391427, 2.8496985]],\n ]\n ),\n]\n\n\nclass TestRandGaussianSmooth(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_value(self, argments, image, expected_data):\n converter = RandGaussianSmooth(**argments)\n converter.set_random_state(seed=0)\n result = converter(image)\n np.testing.assert_allclose(result, expected_data, rtol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_segresnet_block.py_unittest_for_spatial_dims_in_range.for_in_channels_in_range_.for_kernel_size_in_1_3_.TEST_CASE_RESBLOCK_append": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_segresnet_block.py_unittest_for_spatial_dims_in_range.for_in_channels_in_range_.for_kernel_size_in_1_3_.TEST_CASE_RESBLOCK_append", "embedding": null, "metadata": {"file_path": "tests/test_segresnet_block.py", "file_name": "test_segresnet_block.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 36, "span_ids": ["docstring"], "tokens": 181}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks import eval_mode\nfrom monai.networks.blocks.segresnet_block import ResBlock\n\nTEST_CASE_RESBLOCK = []\nfor spatial_dims in range(2, 4):\n for in_channels in range(1, 4):\n for kernel_size in [1, 3]:\n for norm_name in [\"group\", \"batch\", \"instance\"]:\n test_case = [\n {\n \"spatial_dims\": spatial_dims,\n \"in_channels\": in_channels,\n \"kernel_size\": kernel_size,\n \"norm_name\": norm_name,\n \"num_groups\": in_channels,\n },\n (2, in_channels, *([16] * spatial_dims)),\n (2, in_channels, *([16] * spatial_dims)),\n ]\n TEST_CASE_RESBLOCK.append(test_case)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_segresnet_block.py_TestResBlock_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_segresnet_block.py_TestResBlock_", "embedding": null, "metadata": {"file_path": "tests/test_segresnet_block.py", "file_name": "test_segresnet_block.py", "file_type": "text/x-python", "category": "test", "start_line": 39, "end_line": 58, "span_ids": ["impl:10", "TestResBlock.test_ill_arg", "TestResBlock", "TestResBlock.test_shape"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": 
"class TestResBlock(unittest.TestCase):\n @parameterized.expand(TEST_CASE_RESBLOCK)\n def test_shape(self, input_param, input_shape, expected_shape):\n net = ResBlock(**input_param)\n with eval_mode(net):\n result = net(torch.randn(input_shape))\n self.assertEqual(result.shape, expected_shape)\n\n def test_ill_arg(self):\n with self.assertRaises(AssertionError):\n ResBlock(spatial_dims=3, in_channels=8, kernel_size=2, num_groups=8)\n with self.assertRaises(ValueError):\n ResBlock(spatial_dims=3, in_channels=8, norm_name=\"norm\", num_groups=8)\n with self.assertRaises(AssertionError):\n ResBlock(spatial_dims=3, in_channels=8, num_groups=3)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_simulatedelay.py_time_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_simulatedelay.py_time_", "embedding": null, "metadata": {"file_path": "tests/test_simulatedelay.py", "file_name": "test_simulatedelay.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 35, "span_ids": ["TestSimulateDelay.test_value", "TestSimulateDelay", "impl", "docstring"], "tokens": 161}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import time\nimport unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms.utility.array import SimulateDelay\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestSimulateDelay(NumpyImageTestCase2D):\n @parameterized.expand([(0.45,), (1,)])\n def test_value(self, delay_test_time: float):\n resize = SimulateDelay(delay_time=delay_test_time)\n start: float = time.time()\n result = resize(self.imt[0])\n stop: float = time.time()\n measured_approximate: float = stop - start\n np.testing.assert_allclose(delay_test_time, measured_approximate, rtol=0.5)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_simulatedelayd.py_time_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_simulatedelayd.py_time_", "embedding": null, "metadata": {"file_path": "tests/test_simulatedelayd.py", "file_name": "test_simulatedelayd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 35, "span_ids": ["TestSimulateDelay.test_value", "TestSimulateDelay", "impl", "docstring"], "tokens": 172}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "import time\nimport unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms.utility.dictionary import SimulateDelayd\nfrom tests.utils import NumpyImageTestCase2D\n\n\nclass TestSimulateDelay(NumpyImageTestCase2D):\n @parameterized.expand([(0.45,), (1,)])\n def test_value(self, delay_test_time: float):\n resize = SimulateDelayd(keys=\"imgd\", delay_time=delay_test_time)\n start: float = time.time()\n _ = resize({\"imgd\": self.imt[0]})\n stop: float = time.time()\n measured_approximate: float = stop - start\n np.testing.assert_allclose(delay_test_time, measured_approximate, rtol=0.5)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_squeezedim.py_unittest_TEST_CASE_6._TypeError_dim_0_5_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_squeezedim.py_unittest_TEST_CASE_6._TypeError_dim_0_5_", "embedding": null, "metadata": {"file_path": "tests/test_squeezedim.py", "file_name": "test_squeezedim.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 32, "span_ids": ["docstring"], "tokens": 269}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import SqueezeDim\n\nTEST_CASE_1 = [{\"dim\": None}, np.random.rand(1, 2, 1, 3), (2, 3)]\n\nTEST_CASE_2 = [{\"dim\": 2}, np.random.rand(1, 2, 1, 8, 16), (1, 2, 8, 16)]\n\nTEST_CASE_3 = [{\"dim\": -1}, np.random.rand(1, 1, 16, 8, 1), (1, 1, 16, 8)]\n\nTEST_CASE_4 = [{}, np.random.rand(1, 2, 1, 3), (2, 1, 3)]\n\nTEST_CASE_4_PT = [{}, torch.rand(1, 2, 1, 3), (2, 1, 3)]\n\nTEST_CASE_5 = [ValueError, {\"dim\": -2}, np.random.rand(1, 1, 16, 8, 1)]\n\nTEST_CASE_6 = [TypeError, {\"dim\": 0.5}, np.random.rand(1, 1, 16, 8, 1)]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_subpixel_upsample.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_subpixel_upsample.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_subpixel_upsample.py", "file_name": "test_subpixel_upsample.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 86, "span_ids": ["impl:27", "impl:17", "TestSUBPIXEL.test_subpixel_shape", "TestSUBPIXEL", "docstring"], "tokens": 647}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nimport torch.nn as nn\nfrom parameterized import parameterized\n\nfrom monai.networks import eval_mode\nfrom monai.networks.blocks import SubpixelUpsample\nfrom monai.networks.layers.factories import Conv\n\nTEST_CASE_SUBPIXEL = []\nfor inch in range(1, 5):\n for dim in range(1, 4):\n for factor in range(1, 3):\n test_case = [\n {\"dimensions\": dim, \"in_channels\": inch, \"scale_factor\": factor},\n (2, inch, *([8] * dim)),\n (2, inch, *([8 * factor] * dim)),\n ]\n TEST_CASE_SUBPIXEL.append(test_case)\n\nTEST_CASE_SUBPIXEL_2D_EXTRA = [\n {\"dimensions\": 2, \"in_channels\": 2, \"scale_factor\": 3},\n (2, 2, 8, 4), # different size for H and W\n (2, 2, 24, 12),\n]\n\nTEST_CASE_SUBPIXEL_3D_EXTRA = [\n {\"dimensions\": 3, \"in_channels\": 1, \"scale_factor\": 2},\n (2, 1, 16, 8, 4), # different size for H, W and D\n (2, 1, 32, 16, 8),\n]\n\nconv_block = nn.Sequential(\n Conv[Conv.CONV, 3](1, 4, kernel_size=1),\n Conv[Conv.CONV, 3](\n 4,\n 8,\n kernel_size=3,\n stride=1,\n padding=1,\n ),\n)\n\nTEST_CASE_SUBPIXEL_CONV_BLOCK_EXTRA = [\n {\"dimensions\": 3, \"in_channels\": 1, \"scale_factor\": 2, \"conv_block\": conv_block},\n (2, 1, 16, 8, 4), # different size for H, W and D\n (2, 1, 32, 16, 8),\n]\n\nTEST_CASE_SUBPIXEL.append(TEST_CASE_SUBPIXEL_2D_EXTRA)\nTEST_CASE_SUBPIXEL.append(TEST_CASE_SUBPIXEL_3D_EXTRA)\nTEST_CASE_SUBPIXEL.append(TEST_CASE_SUBPIXEL_CONV_BLOCK_EXTRA)\n\n\n# add every test back with the pad/pool sequential component omitted\nfor tests in list(TEST_CASE_SUBPIXEL):\n args: dict = tests[0] # type: ignore\n args = dict(args)\n args[\"apply_pad_pool\"] = False\n TEST_CASE_SUBPIXEL.append([args, tests[1], tests[2]])\n\n\nclass TestSUBPIXEL(unittest.TestCase):\n @parameterized.expand(TEST_CASE_SUBPIXEL)\n def test_subpixel_shape(self, input_param, input_shape, expected_shape):\n net = SubpixelUpsample(**input_param)\n with eval_mode(net):\n result = net.forward(torch.randn(input_shape))\n self.assertEqual(result.shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_tversky_loss.py_unittest_TEST_CASES": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_tversky_loss.py_unittest_TEST_CASES", "embedding": null, "metadata": {"file_path": "tests/test_tversky_loss.py", "file_name": "test_tversky_loss.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 154, "span_ids": ["docstring"], "tokens": 53}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.losses import TverskyLoss\nfrom tests.utils import SkipIfBeforePyTorchVersion, test_script_save\n\nTEST_CASES =\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vnet.py_unittest_TEST_CASE_VNET_3D_3._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vnet.py_unittest_TEST_CASE_VNET_3D_3._", "embedding": null, "metadata": {"file_path": "tests/test_vnet.py", "file_name": "test_vnet.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 52, "span_ids": ["impl:13", "docstring"], "tokens": 508}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks import eval_mode\nfrom monai.networks.nets import VNet\nfrom tests.utils import test_script_save\n\ndevice = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n\nTEST_CASE_VNET_2D_1 = [\n {\"spatial_dims\": 2, \"in_channels\": 4, \"out_channels\": 1, \"act\": \"elu\", \"dropout_dim\": 1},\n (1, 4, 32, 32),\n (1, 1, 32, 32),\n]\nTEST_CASE_VNET_2D_2 = [\n {\"spatial_dims\": 2, \"in_channels\": 2, \"out_channels\": 2, \"act\": \"prelu\", \"dropout_dim\": 2},\n (1, 2, 32, 32),\n (1, 2, 32, 32),\n]\nTEST_CASE_VNET_2D_3 = [\n {\"spatial_dims\": 2, \"in_channels\": 1, \"out_channels\": 3, \"dropout_dim\": 3},\n (1, 1, 32, 32),\n (1, 3, 32, 32),\n]\nTEST_CASE_VNET_3D_1 = [\n {\"spatial_dims\": 3, \"in_channels\": 4, \"out_channels\": 1, \"act\": \"elu\", \"dropout_dim\": 1},\n (1, 4, 32, 32, 32),\n (1, 1, 32, 32, 32),\n]\nTEST_CASE_VNET_3D_2 = [\n {\"spatial_dims\": 3, \"in_channels\": 2, \"out_channels\": 2, \"act\": \"prelu\", \"dropout_dim\": 2},\n (1, 2, 32, 32, 32),\n (1, 2, 32, 32, 32),\n]\nTEST_CASE_VNET_3D_3 = [\n {\"spatial_dims\": 3, \"in_channels\": 1, \"out_channels\": 3, \"dropout_dim\": 3},\n (1, 1, 32, 32, 32),\n (1, 3, 32, 32, 32),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vnet.py_TestVNet_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vnet.py_TestVNet_", "embedding": null, "metadata": {"file_path": "tests/test_vnet.py", "file_name": "test_vnet.py", "file_type": "text/x-python", "category": "test", "start_line": 55, "end_line": 76, "span_ids": ["TestVNet", "TestVNet.test_vnet_shape", "TestVNet.test_script"], "tokens": 204}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestVNet(unittest.TestCase):\n @parameterized.expand(\n [\n TEST_CASE_VNET_2D_1,\n TEST_CASE_VNET_2D_2,\n 
TEST_CASE_VNET_2D_3,\n TEST_CASE_VNET_3D_1,\n TEST_CASE_VNET_3D_2,\n TEST_CASE_VNET_3D_3,\n ]\n )\n def test_vnet_shape(self, input_param, input_shape, expected_shape):\n net = VNet(**input_param).to(device)\n with eval_mode(net):\n result = net.forward(torch.randn(input_shape).to(device))\n self.assertEqual(result.shape, expected_shape)\n\n def test_script(self):\n net = VNet(spatial_dims=3, in_channels=1, out_channels=3, dropout_dim=3)\n test_data = torch.randn(1, 1, 32, 32, 32)\n test_script_save(net, test_data)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vote_ensemble.py_unittest_TEST_CASE_5._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vote_ensemble.py_unittest_TEST_CASE_5._", "embedding": null, "metadata": {"file_path": "tests/test_vote_ensemble.py", "file_name": "test_vote_ensemble.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 52, "span_ids": ["docstring"], "tokens": 411}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import VoteEnsemble\n\n# shape: [1, 2, 1, 1]\nTEST_CASE_1 = [\n {\"num_classes\": None},\n [torch.tensor([[[[1]], [[0]]]]), torch.tensor([[[[1]], [[0]]]]), torch.tensor([[[[0]], [[1]]]])],\n torch.tensor([[[[1.0]], [[0.0]]]]),\n]\n\n# shape: [1, 2, 1, 1]\nTEST_CASE_2 = [\n {\"num_classes\": None},\n torch.stack([torch.tensor([[[[1]], [[0]]]]), torch.tensor([[[[1]], [[0]]]]), torch.tensor([[[[0]], [[1]]]])]),\n torch.tensor([[[[1.0]], [[0.0]]]]),\n]\n\n# shape: [1, 1, 2, 1]\nTEST_CASE_3 = [\n {\"num_classes\": 3},\n [torch.tensor([[[[0], [2]]]]), torch.tensor([[[[0], [2]]]]), torch.tensor([[[[1], [1]]]])],\n torch.tensor([[[[0], [2]]]]),\n]\n\n# shape: [1, 1, 2, 1]\nTEST_CASE_4 = [\n {\"num_classes\": 5},\n [torch.tensor([[[[0], [2]]]]), torch.tensor([[[[0], [2]]]]), torch.tensor([[[[1], [1]]]])],\n torch.tensor([[[[0], [2]]]]),\n]\n\n# shape: [2]\nTEST_CASE_5 = [\n {\"num_classes\": 3},\n [torch.tensor([0, 2]), torch.tensor([0, 2]), torch.tensor([1, 1])],\n torch.tensor([0, 2]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vote_ensemble.py_TestVoteEnsemble_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vote_ensemble.py_TestVoteEnsemble_", "embedding": null, "metadata": {"file_path": "tests/test_vote_ensemble.py", "file_name": "test_vote_ensemble.py", "file_type": "text/x-python", "category": "test", "start_line": 55, "end_line": 75, "span_ids": ["TestVoteEnsemble.test_cuda_value", "TestVoteEnsemble", "TestVoteEnsemble.test_value", "impl:11"], "tokens": 216}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestVoteEnsemble(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5])\n def test_value(self, input_param, img, expected_value):\n result = VoteEnsemble(**input_param)(img)\n torch.testing.assert_allclose(result, expected_value)\n\n def test_cuda_value(self):\n img = torch.stack(\n [torch.tensor([[[[1]], [[0]]]]), torch.tensor([[[[1]], [[0]]]]), torch.tensor([[[[0]], [[1]]]])]\n )\n expected_value = torch.tensor([[[[1.0]], [[0.0]]]])\n if torch.cuda.is_available():\n img = img.to(torch.device(\"cuda:0\"))\n expected_value = expected_value.to(torch.device(\"cuda:0\"))\n result = VoteEnsemble(num_classes=None)(img)\n torch.testing.assert_allclose(result, expected_value)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vote_ensembled.py_unittest_TEST_CASE_5._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vote_ensembled.py_unittest_TEST_CASE_5._", "embedding": null, "metadata": {"file_path": "tests/test_vote_ensembled.py", "file_name": "test_vote_ensembled.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 68, "span_ids": ["docstring:14", "docstring"], "tokens": 585}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.transforms import VoteEnsembled\n\n# shape: [1, 2, 1, 1]\nTEST_CASE_1 = [\n {\"keys\": [\"pred0\", \"pred1\", \"pred2\"], \"output_key\": \"output\", \"num_classes\": None},\n {\n \"pred0\": torch.tensor([[[[1]], [[0]]]]),\n \"pred1\": torch.tensor([[[[1]], [[0]]]]),\n \"pred2\": torch.tensor([[[[0]], [[1]]]]),\n },\n torch.tensor([[[[1.0]], [[0.0]]]]),\n]\n\n# shape: [1, 2, 1, 1]\nTEST_CASE_2 = [\n {\"keys\": \"output\", \"output_key\": \"output\", \"num_classes\": None},\n {\n \"output\": torch.stack(\n [torch.tensor([[[[1]], [[0]]]]), torch.tensor([[[[1]], [[0]]]]), torch.tensor([[[[0]], [[1]]]])]\n )\n },\n torch.tensor([[[[1.0]], [[0.0]]]]),\n]\n\n# shape: [1, 1, 2, 1]\nTEST_CASE_3 = [\n {\"keys\": [\"pred0\", \"pred1\", \"pred2\"], \"output_key\": \"output\", \"num_classes\": 3},\n {\n \"pred0\": torch.tensor([[[[0], [2]]]]),\n \"pred1\": torch.tensor([[[[0], [2]]]]),\n \"pred2\": torch.tensor([[[[1], [1]]]]),\n },\n torch.tensor([[[[0], [2]]]]),\n]\n\n# shape: [1, 1, 2, 1]\nTEST_CASE_4 = [\n {\"keys\": [\"pred0\", \"pred1\", \"pred2\"], \"output_key\": \"output\", \"num_classes\": 5},\n {\n \"pred0\": torch.tensor([[[[0], [2]]]]),\n \"pred1\": torch.tensor([[[[0], [2]]]]),\n \"pred2\": torch.tensor([[[[1], 
[1]]]]),\n },\n torch.tensor([[[[0], [2]]]]),\n]\n\n# shape: [2]\nTEST_CASE_5 = [\n {\"keys\": [\"pred0\", \"pred1\", \"pred2\"], \"output_key\": \"output\", \"num_classes\": 3},\n {\"pred0\": torch.tensor([0, 2]), \"pred1\": torch.tensor([0, 2]), \"pred2\": torch.tensor([1, 1])},\n torch.tensor([0, 2]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vote_ensembled.py_TestVoteEnsembled_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_vote_ensembled.py_TestVoteEnsembled_", "embedding": null, "metadata": {"file_path": "tests/test_vote_ensembled.py", "file_name": "test_vote_ensembled.py", "file_type": "text/x-python", "category": "test", "start_line": 71, "end_line": 91, "span_ids": ["TestVoteEnsembled.test_cuda_value", "TestVoteEnsembled", "impl:11", "TestVoteEnsembled.test_value"], "tokens": 227}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestVoteEnsembled(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5])\n def test_value(self, input_param, img, expected_value):\n result = VoteEnsembled(**input_param)(img)\n torch.testing.assert_allclose(result[\"output\"], expected_value)\n\n def test_cuda_value(self):\n img = torch.stack(\n [torch.tensor([[[[1]], [[0]]]]), torch.tensor([[[[1]], [[0]]]]), torch.tensor([[[[0]], [[1]]]])]\n )\n expected_value = torch.tensor([[[[1.0]], [[0.0]]]])\n if torch.cuda.is_available():\n img = img.to(torch.device(\"cuda:0\"))\n expected_value = expected_value.to(torch.device(\"cuda:0\"))\n result = VoteEnsembled(keys=\"output\", num_classes=None)({\"output\": img})\n torch.testing.assert_allclose(result[\"output\"], expected_value)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/utils.py_check_hash_check_hash.return.True": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/utils.py_check_hash_check_hash.return.True", "embedding": null, "metadata": {"file_path": "monai/apps/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 37, "end_line": 68, "span_ids": ["check_hash"], "tokens": 278}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def check_hash(filepath: str, val: Optional[str] = None, hash_type: str = \"md5\") -> bool:\n \"\"\"\n Verify hash signature of 
specified file.\n\n Args:\n filepath: path of source file to verify hash value.\n val: expected hash value of the file.\n hash_type: 'md5' or 'sha1', defaults to 'md5'.\n\n \"\"\"\n if val is None:\n print(f\"Expected {hash_type} is None, skip {hash_type} check for file {filepath}.\")\n return True\n if hash_type.lower() == \"md5\":\n actual_hash = hashlib.md5()\n elif hash_type.lower() == \"sha1\":\n actual_hash = hashlib.sha1()\n else:\n raise NotImplementedError(f\"Unknown 'hash_type' {hash_type}.\")\n try:\n with open(filepath, \"rb\") as f:\n for chunk in iter(lambda: f.read(1024 * 1024), b\"\"):\n actual_hash.update(chunk)\n except Exception as e:\n print(f\"Exception in check_hash: {e}\")\n return False\n if val != actual_hash.hexdigest():\n print(\"check_hash failed.\")\n return False\n\n print(f\"Verified '{os.path.basename(filepath)}', {hash_type}: {val}.\")\n return True", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/deviceconfig.py_os_get_config_values.return.output": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/deviceconfig.py_os_get_config_values.return.output", "embedding": null, "metadata": {"file_path": "monai/config/deviceconfig.py", "file_name": "deviceconfig.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 62, "span_ids": ["get_config_values", "docstring"], "tokens": 293}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport platform\nimport re\nimport sys\nfrom collections import OrderedDict\n\nimport numpy as np\nimport torch\n\nimport monai\nfrom monai.utils import OptionalImportError, get_package_version, optional_import\n\ntry:\n import itk # type: ignore\n\n itk_version = itk.Version.GetITKVersion()\n del itk\nexcept (ImportError, AttributeError):\n itk_version = \"NOT INSTALLED or UNKNOWN VERSION.\"\n\ntry:\n _, HAS_EXT = optional_import(\"monai._C\")\n USE_COMPILED = HAS_EXT and os.getenv(\"BUILD_MONAI\", \"0\") == \"1\"\nexcept (OptionalImportError, ImportError, AttributeError):\n HAS_EXT = USE_COMPILED = False\n\npsutil, has_psutil = optional_import(\"psutil\")\npsutil_version = psutil.__version__ if has_psutil else \"NOT INSTALLED or UNKNOWN VERSION.\"\n\n__all__ = [\n \"print_config\",\n \"get_system_info\",\n \"print_system_info\",\n \"get_gpu_info\",\n \"print_gpu_info\",\n \"print_debug_info\",\n \"USE_COMPILED\",\n]\n\n\ndef get_config_values():\n \"\"\"\n Read the package versions into a dictionary.\n \"\"\"\n output = OrderedDict()\n\n output[\"MONAI\"] = monai.__version__\n output[\"Numpy\"] = np.version.full_version\n output[\"Pytorch\"] = torch.__version__\n\n return output", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/deviceconfig.py_get_optional_config_values_get_optional_config_values.return.output": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/config/deviceconfig.py_get_optional_config_values_get_optional_config_values.return.output", "embedding": null, "metadata": {"file_path": "monai/config/deviceconfig.py", "file_name": "deviceconfig.py", "file_type": "text/x-python", "category": "implementation", "start_line": 65, "end_line": 83, "span_ids": ["get_optional_config_values"], "tokens": 181}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_optional_config_values():\n \"\"\"\n Read the optional package versions into a dictionary.\n \"\"\"\n output = OrderedDict()\n\n output[\"Pytorch Ignite\"] = get_package_version(\"ignite\")\n output[\"Nibabel\"] = get_package_version(\"nibabel\")\n output[\"scikit-image\"] = get_package_version(\"skimage\")\n output[\"Pillow\"] = get_package_version(\"PIL\")\n output[\"Tensorboard\"] = get_package_version(\"tensorboard\")\n output[\"gdown\"] = get_package_version(\"gdown\")\n output[\"TorchVision\"] = get_package_version(\"torchvision\")\n output[\"ITK\"] = itk_version\n output[\"tqdm\"] = get_package_version(\"tqdm\")\n output[\"lmdb\"] = get_package_version(\"lmdb\")\n output[\"psutil\"] = psutil_version\n\n return output", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_SmartCacheDataset_SmartCacheDataset._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_SmartCacheDataset_SmartCacheDataset._", "embedding": null, "metadata": {"file_path": "monai/data/dataset.py", "file_name": "dataset.py", "file_type": "text/x-python", "category": "implementation", "start_line": 572, "end_line": 627, "span_ids": ["SmartCacheDataset"], "tokens": 864}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SmartCacheDataset(Randomizable, CacheDataset):\n \"\"\"\n Re-implementation of the SmartCache mechanism in NVIDIA Clara-train SDK.\n At any time, the cache pool only keeps a subset of the whole dataset. In each epoch, only the items\n in the cache are used for training. This ensures that data needed for training is readily available,\n keeping GPU resources busy. Note that cached items may still have to go through a non-deterministic\n transform sequence before being fed to GPU. At the same time, another thread is preparing replacement\n items by applying the transform sequence to items not in cache. 
Once one epoch is completed, Smart\n Cache replaces the same number of items with replacement items.\n Smart Cache uses a simple `running window` algorithm to determine the cache content and replacement items.\n Let N be the configured number of objects in cache; and R be the number of replacement objects (R = ceil(N * r),\n where r is the configured replace rate).\n For more details, please refer to:\n https://docs.nvidia.com/clara/tlt-mi/clara-train-sdk-v3.0/nvmidl/additional_features/smart_cache.html#smart-cache\n If passing slicing indices, will return a PyTorch Subset, for example: `data: Subset = dataset[1:4]`,\n for more details, please check: https://pytorch.org/docs/stable/data.html#torch.utils.data.Subset\n\n For example, if we have 5 images: `[image1, image2, image3, image4, image5]`, and `cache_num=4`, `replace_rate=0.25`.\n so the actual training images cached and replaced for every epoch are as below::\n\n epoch 1: [image1, image2, image3, image4]\n epoch 2: [image2, image3, image4, image5]\n epoch 3: [image3, image4, image5, image1]\n epoch 3: [image4, image5, image1, image2]\n epoch N: [image[N % 5] ...]\n\n The usage of `SmartCacheDataset` contains 4 steps:\n\n 1. Initialize `SmartCacheDataset` object and cache for the first epoch.\n 2. Call `start()` to run replacement thread in background.\n 3. Call `update_cache()` before every epoch to replace training items.\n 4. Call `shutdown()` when training ends.\n\n Note:\n This replacement will not work if setting the `multiprocessing_context` of DataLoader to `spawn`\n or on windows(the default multiprocessing method is `spawn`) and setting `num_workers` greater than 0.\n\n If using MONAI workflows, please add `SmartCacheHandler` to the handler list of trainer,\n otherwise, please make sure to call `start()`, `update_cache()`, `shutdown()` during training.\n\n Args:\n data: input data to load and transform to generate dataset for model.\n transform: transforms to execute operations on input data.\n replace_rate: percentage of the cached items to be replaced in every epoch.\n cache_num: number of items to be cached. 
Default is `sys.maxsize`.\n will take the minimum of (cache_num, data_length x cache_rate, data_length).\n cache_rate: percentage of cached data in total, default is 1.0 (cache all).\n will take the minimum of (cache_num, data_length x cache_rate, data_length).\n num_init_workers: the number of worker threads to initialize the cache for first epoch.\n If num_init_workers is None then the number returned by os.cpu_count() is used.\n num_replace_workers: the number of worker threads to prepare the replacement cache for every epoch.\n If num_replace_workers is None then the number returned by os.cpu_count() is used.\n progress: whether to display a progress bar when caching for the first epoch.\n shuffle: whether to shuffle the whole data list before preparing the cache content for first epoch.\n seed: random seed if shuffle is `True`, default to `0`.\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_SmartCacheDataset.__init___SmartCacheDataset._restart.self__replace_mgr_start_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_SmartCacheDataset.__init___SmartCacheDataset._restart.self__replace_mgr_start_", "embedding": null, "metadata": {"file_path": "monai/data/dataset.py", "file_name": "dataset.py", "file_type": "text/x-python", "category": "implementation", "start_line": 629, "end_line": 712, "span_ids": ["SmartCacheDataset._compute_data_idx", "SmartCacheDataset.__init__", "SmartCacheDataset.randomize", "SmartCacheDataset.is_started", "SmartCacheDataset._restart", "SmartCacheDataset.start"], "tokens": 670}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SmartCacheDataset(Randomizable, CacheDataset):\n\n def __init__(\n self,\n data: Sequence,\n transform: Union[Sequence[Callable], Callable],\n replace_rate: float,\n cache_num: int = sys.maxsize,\n cache_rate: float = 1.0,\n num_init_workers: Optional[int] = None,\n num_replace_workers: Optional[int] = None,\n progress: bool = True,\n shuffle: bool = True,\n seed: int = 0,\n ) -> None:\n if shuffle:\n self.set_random_state(seed=seed)\n self.randomize(data)\n\n super().__init__(data, transform, cache_num, cache_rate, num_init_workers, progress)\n if self._cache is None:\n self._cache = self._fill_cache()\n if self.cache_num >= len(data):\n warnings.warn(\"cache_num is greater or equal than dataset length, fall back to regular CacheDataset.\")\n if replace_rate <= 0:\n raise ValueError(\"replace_rate must be greater than 0, otherwise, please use CacheDataset.\")\n\n self.num_replace_workers: Optional[int] = num_replace_workers\n if self.num_replace_workers is not None:\n self.num_replace_workers = max(int(self.num_replace_workers), 1)\n\n self._total_num: int = len(data)\n self._replace_num: int = min(math.ceil(self.cache_num * replace_rate), len(data) - self.cache_num)\n self._replacements: List[Any] = [None for _ in range(self._replace_num)]\n self._replace_data_idx: List[int] = 
list(range(self._replace_num))\n\n self._start_pos: int = 0\n self._update_lock: threading.Lock = threading.Lock()\n self._round: int = 1\n self._replace_done: bool = False\n self._replace_mgr: Optional[threading.Thread] = None\n\n self._compute_data_idx()\n\n def randomize(self, data: Sequence) -> None:\n try:\n self.R.shuffle(data)\n except TypeError as e:\n warnings.warn(f\"input data can't be shuffled in SmartCacheDataset with numpy.random.shuffle(): {e}.\")\n\n def _compute_data_idx(self):\n \"\"\"\n Update the replacement data position in the total data.\n\n \"\"\"\n for i in range(self._replace_num):\n pos: int = self._start_pos + self.cache_num + i\n if pos >= self._total_num:\n pos -= self._total_num\n self._replace_data_idx[i] = pos\n\n def is_started(self):\n \"\"\"\n Check whether the replacement thread is already started.\n\n \"\"\"\n if self._replace_mgr is None:\n return False\n return self._replace_mgr.is_alive()\n\n def start(self):\n \"\"\"\n Start the background thread to replace training items for every epoch.\n\n \"\"\"\n if self._replace_mgr is None or not self.is_started():\n self._restart()\n\n def _restart(self):\n \"\"\"\n Restart background thread if killed for some reason.\n\n \"\"\"\n self._round = 1\n self._replace_mgr = threading.Thread(target=self.manage_replacement, daemon=True)\n self._replace_mgr.start()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_SmartCacheDataset.update_cache_SmartCacheDataset.__len__.return.self_cache_num": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/dataset.py_SmartCacheDataset.update_cache_SmartCacheDataset.__len__.return.self_cache_num", "embedding": null, "metadata": {"file_path": "monai/data/dataset.py", "file_name": "dataset.py", "file_type": "text/x-python", "category": "implementation", "start_line": 740, "end_line": 828, "span_ids": ["SmartCacheDataset._replace_cache_thread", "SmartCacheDataset._compute_replacements", "SmartCacheDataset._try_shutdown", "SmartCacheDataset._try_manage_replacement", "SmartCacheDataset.shutdown", "SmartCacheDataset.__len__", "SmartCacheDataset.manage_replacement", "SmartCacheDataset.update_cache"], "tokens": 527}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SmartCacheDataset(Randomizable, CacheDataset):\n\n def update_cache(self):\n \"\"\"\n Update cache items for current epoch, need to call this function before every epoch.\n If the cache has been shutdown before, need to restart the `_replace_mgr` thread.\n\n \"\"\"\n if not self._replace_mgr.is_alive():\n self._restart()\n\n # make sure update is done\n while not self._try_update_cache():\n time.sleep(0.01)\n\n def _try_shutdown(self):\n \"\"\"\n Wait for thread lock to shut down the background thread.\n\n \"\"\"\n with self._update_lock:\n if self._replace_done:\n self._round = 0\n self._replace_done = False\n return True\n return False\n\n def shutdown(self):\n \"\"\"\n Shut down the background thread for 
replacement.\n\n \"\"\"\n if not self.is_started():\n return\n\n # wait until replace mgr is done the current round\n while not self._try_shutdown():\n time.sleep(0.01)\n self._replace_mgr.join()\n\n def _replace_cache_thread(self, index: int):\n \"\"\"\n Execute deterministic transforms on the new data for replacement.\n\n \"\"\"\n pos: int = self._replace_data_idx[index]\n self._replacements[index] = self._load_cache_item(pos)\n\n def _compute_replacements(self):\n \"\"\"\n Compute expected items for the replacement of next epoch, execute deterministic transforms.\n It can support multi-threads to accelerate the computation progress.\n\n \"\"\"\n with ThreadPool(self.num_replace_workers) as p:\n p.map(self._replace_cache_thread, list(range(self._replace_num)))\n\n self._replace_done = True\n\n def _try_manage_replacement(self, check_round):\n \"\"\"\n Wait thread lock and replace training items in the background thread.\n\n \"\"\"\n with self._update_lock:\n if self._round <= 0:\n # shutdown replacement\n self._replace_done = True\n return True, -1\n\n if self._round != check_round:\n self._compute_replacements()\n return False, self._round\n\n def manage_replacement(self):\n \"\"\"\n Background thread for replacement.\n\n \"\"\"\n check_round: int = -1\n done = False\n while not done:\n done, check_round = self._try_manage_replacement(check_round)\n time.sleep(0.01)\n\n def __len__(self):\n \"\"\"\n The dataset length is given by cache_num instead of len(data).\n\n \"\"\"\n return self.cache_num", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/decathlon_datalist.py__append_paths__append_paths.return.items": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/decathlon_datalist.py__append_paths__append_paths.return.items", "embedding": null, "metadata": {"file_path": "monai/data/decathlon_datalist.py", "file_name": "decathlon_datalist.py", "file_type": "text/x-python", "category": "implementation", "start_line": 59, "end_line": 81, "span_ids": ["_append_paths"], "tokens": 230}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _append_paths(base_dir: str, is_segmentation: bool, items: List[Dict]) -> List[Dict]:\n \"\"\"\n Args:\n base_dir: the base directory of the dataset.\n is_segmentation: whether the datalist is for segmentation task.\n items: list of data items, each of which is a dict keyed by element names.\n\n Raises:\n TypeError: When ``items`` contains a non ``dict``.\n\n \"\"\"\n for item in items:\n if not isinstance(item, dict):\n raise TypeError(f\"Every item in items must be a dict but got {type(item).__name__}.\")\n for k, v in item.items():\n if k == \"image\":\n item[k] = _compute_path(base_dir, v, check_path=False)\n elif is_segmentation and k == \"label\":\n item[k] = _compute_path(base_dir, v, check_path=False)\n else:\n # for other items, auto detect whether it's a valid path\n item[k] = _compute_path(base_dir, v, check_path=True)\n return items", 
"start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/decathlon_datalist.py_load_decathlon_datalist_load_decathlon_datalist.return._append_paths_base_dir_i": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/decathlon_datalist.py_load_decathlon_datalist_load_decathlon_datalist.return._append_paths_base_dir_i", "embedding": null, "metadata": {"file_path": "monai/data/decathlon_datalist.py", "file_name": "decathlon_datalist.py", "file_type": "text/x-python", "category": "implementation", "start_line": 73, "end_line": 117, "span_ids": ["load_decathlon_datalist"], "tokens": 431}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def load_decathlon_datalist(\n data_list_file_path: str,\n is_segmentation: bool = True,\n data_list_key: str = \"training\",\n base_dir: Optional[str] = None,\n) -> List[Dict]:\n \"\"\"Load image/label paths of decathlon challenge from JSON file\n\n Json file is similar to what you get from http://medicaldecathlon.com/\n Those dataset.json files\n\n Args:\n data_list_file_path: the path to the json file of datalist.\n is_segmentation: whether the datalist is for segmentation task, default is True.\n data_list_key: the key to get a list of dictionary to be used, default is \"training\".\n base_dir: the base directory of the dataset, if None, use the datalist directory.\n\n Raises:\n ValueError: When ``data_list_file_path`` does not point to a file.\n ValueError: When ``data_list_key`` is not specified in the data list file.\n\n Returns a list of data items, each of which is a dict keyed by element names, for example:\n\n .. 
code-block::\n\n [\n {'image': '/workspace/data/chest_19.nii.gz', 'label': 0},\n {'image': '/workspace/data/chest_31.nii.gz', 'label': 1}\n ]\n\n \"\"\"\n if not os.path.isfile(data_list_file_path):\n raise ValueError(f\"Data list file {data_list_file_path} does not exist.\")\n with open(data_list_file_path) as json_file:\n json_data = json.load(json_file)\n if data_list_key not in json_data:\n raise ValueError(f'Data list {data_list_key} not specified in \"{data_list_file_path}\".')\n expected_data = json_data[data_list_key]\n if data_list_key == \"test\":\n expected_data = [{\"image\": i} for i in expected_data]\n\n if base_dir is None:\n base_dir = os.path.dirname(data_list_file_path)\n\n return _append_paths(base_dir, is_segmentation, expected_data)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/decathlon_datalist.py_load_decathlon_properties_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/decathlon_datalist.py_load_decathlon_properties_", "embedding": null, "metadata": {"file_path": "monai/data/decathlon_datalist.py", "file_name": "decathlon_datalist.py", "file_type": "text/x-python", "category": "implementation", "start_line": 131, "end_line": 156, "span_ids": ["load_decathlon_properties"], "tokens": 232}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def load_decathlon_properties(\n data_property_file_path: str,\n property_keys: Union[Sequence[str], str],\n) -> Dict:\n \"\"\"Load the properties from the JSON file contains data property with specified `property_keys`.\n\n Args:\n data_property_file_path: the path to the JSON file of data properties.\n property_keys: expected keys to load from the JSON file, for example, we have these keys\n in the decathlon challenge:\n `name`, `description`, `reference`, `licence`, `tensorImageSize`,\n `modality`, `labels`, `numTraining`, `numTest`, etc.\n\n \"\"\"\n if not os.path.isfile(data_property_file_path):\n raise ValueError(f\"Data property file {data_property_file_path} does not exist.\")\n with open(data_property_file_path) as json_file:\n json_data = json.load(json_file)\n\n properties = {}\n for key in ensure_tuple(property_keys):\n if key not in json_data:\n raise KeyError(f\"key {key} is not in the data property file.\")\n properties[key] = json_data[key]\n return properties", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/grid_dataset.py_PatchDataset_PatchDataset.__len__.return.len_self_data_self_sam": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/grid_dataset.py_PatchDataset_PatchDataset.__len__.return.len_self_data_self_sam", "embedding": null, "metadata": {"file_path": 
"monai/data/grid_dataset.py", "file_name": "grid_dataset.py", "file_type": "text/x-python", "category": "implementation", "start_line": 168, "end_line": 228, "span_ids": ["PatchDataset.__init__", "PatchDataset.__len__", "PatchDataset"], "tokens": 522}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PatchDataset(Dataset):\n \"\"\"\n returns a patch from an image dataset.\n The patches are generated by a user-specified callable `patch_func`,\n and are optionally post-processed by `transform`.\n For example, to generate random patch samples from an image dataset:\n\n .. code-block:: python\n\n import numpy as np\n\n from monai.data import PatchDataset, DataLoader\n from monai.transforms import RandSpatialCropSamples, RandShiftIntensity\n\n # image dataset\n images = [np.arange(16, dtype=float).reshape(1, 4, 4),\n np.arange(16, dtype=float).reshape(1, 4, 4)]\n # image patch sampler\n n_samples = 5\n sampler = RandSpatialCropSamples(roi_size=(3, 3), num_samples=n_samples,\n random_center=True, random_size=False)\n # patch-level intensity shifts\n patch_intensity = RandShiftIntensity(offsets=1.0, prob=1.0)\n # construct the patch dataset\n ds = PatchDataset(dataset=images,\n patch_func=sampler,\n samples_per_image=n_samples,\n transform=patch_intensity)\n\n # use the patch dataset, length: len(images) x samplers_per_image\n print(len(ds))\n\n >>> 10\n\n for item in DataLoader(ds, batch_size=2, shuffle=True, num_workers=2):\n print(item.shape)\n\n >>> torch.Size([2, 1, 3, 3])\n\n \"\"\"\n\n def __init__(\n self, dataset: Sequence, patch_func: Callable, samples_per_image: int = 1, transform: Optional[Callable] = None\n ) -> None:\n \"\"\"\n Args:\n dataset: an image dataset to extract patches from.\n patch_func: converts an input image (item from dataset) into a sequence of image patches.\n patch_func(dataset[idx]) must return a sequence of patches (length `samples_per_image`).\n samples_per_image: `patch_func` should return a sequence of `samples_per_image` elements.\n transform: transform applied to each patch.\n \"\"\"\n super().__init__(data=dataset, transform=transform)\n\n self.patch_func = patch_func\n if samples_per_image <= 0:\n raise ValueError(\"sampler_per_image must be a positive integer.\")\n self.samples_per_image = int(samples_per_image)\n\n def __len__(self) -> int:\n return len(self.data) * self.samples_per_image", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ImageReader_ImageReader.verify_suffix.raise_NotImplementedError": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ImageReader_ImageReader.verify_suffix.raise_NotImplementedError", "embedding": null, "metadata": {"file_path": "monai/data/image_reader.py", "file_name": "image_reader.py", "file_type": "text/x-python", "category": "implementation", "start_line": 38, "end_line": 55, "span_ids": ["ImageReader.verify_suffix", "ImageReader"], "tokens": 142}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ImageReader(ABC):\n \"\"\"Abstract class to define interface APIs to load image files.\n users need to call `read` to load image and then use `get_data`\n to get the image data and properties from meta data.\n\n \"\"\"\n\n @abstractmethod\n def verify_suffix(self, filename: Union[Sequence[str], str]) -> bool:\n \"\"\"\n Verify whether the specified file or files format is supported by current reader.\n\n Args:\n filename: file name or a list of file names to read.\n if a list of files, verify all the suffixes.\n\n \"\"\"\n raise NotImplementedError(f\"Subclass {self.__class__.__name__} must implement this method.\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ImageReader.read_ImageReader.get_data.raise_NotImplementedError": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ImageReader.read_ImageReader.get_data.raise_NotImplementedError", "embedding": null, "metadata": {"file_path": "monai/data/image_reader.py", "file_name": "image_reader.py", "file_type": "text/x-python", "category": "implementation", "start_line": 57, "end_line": 80, "span_ids": ["ImageReader.get_data", "ImageReader.read"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ImageReader(ABC):\n\n @abstractmethod\n def read(self, data: Union[Sequence[str], str], **kwargs) -> Union[Sequence[Any], Any]:\n \"\"\"\n Read image data from specified file or files.\n Note that it returns the raw data, so different readers return different image data type.\n\n Args:\n data: file name or a list of file names to read.\n kwargs: additional args for actual `read` API of 3rd party libs.\n\n \"\"\"\n raise NotImplementedError(f\"Subclass {self.__class__.__name__} must implement this method.\")\n\n @abstractmethod\n def get_data(self, img) -> Tuple[np.ndarray, Dict]:\n \"\"\"\n Extract data array and meta data from loaded image and return them.\n This function must return 2 objects, first is numpy array of image data, second is dict of meta data.\n\n Args:\n img: an image object loaded from a image file or a list of image objects.\n\n \"\"\"\n raise NotImplementedError(f\"Subclass {self.__class__.__name__} must implement this method.\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ITKReader.read_ITKReader.read.return.img__if_len_filenames_": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ITKReader.read_ITKReader.read.return.img__if_len_filenames_", "embedding": null, "metadata": {"file_path": "monai/data/image_reader.py", "file_name": "image_reader.py", "file_type": "text/x-python", "category": "implementation", "start_line": 162, "end_line": 197, "span_ids": ["ITKReader.read"], "tokens": 375}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ITKReader(ImageReader):\n\n def read(self, data: Union[Sequence[str], str], **kwargs):\n \"\"\"\n Read image data from specified file or files.\n Note that the returned object is ITK image object or list of ITK image objects.\n\n Args:\n data: file name or a list of file names to read,\n kwargs: additional args for `itk.imread` API, will override `self.kwargs` for existing keys.\n More details about available args:\n https://github.com/InsightSoftwareConsortium/ITK/blob/master/Wrapping/Generators/Python/itkExtras.py\n\n \"\"\"\n img_: List[Image] = []\n\n filenames: Sequence[str] = ensure_tuple(data)\n kwargs_ = self.kwargs.copy()\n kwargs_.update(kwargs)\n for name in filenames:\n if os.path.isdir(name):\n # read DICOM series of 1 image in a folder, refer to: https://github.com/RSIP-Vision/medio\n names_generator = itk.GDCMSeriesFileNames.New()\n names_generator.SetUseSeriesDetails(True)\n names_generator.AddSeriesRestriction(\"0008|0021\") # Series Date\n names_generator.SetDirectory(name)\n series_uid = names_generator.GetSeriesUIDs()\n\n if len(series_uid) == 0:\n raise FileNotFoundError(f\"no DICOMs in: {name}.\")\n if len(series_uid) > 1:\n raise OSError(f\"the directory: {name} contains more than one DICOM series.\")\n\n series_identifier = series_uid[0]\n name = names_generator.GetFileNames(series_identifier)\n\n img_.append(itk.imread(name, **kwargs_))\n return img_ if len(filenames) > 1 else img_[0]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ITKReader._get_meta_dict_ITKReader._get_meta_dict.return.meta_dict": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_ITKReader._get_meta_dict_ITKReader._get_meta_dict.return.meta_dict", "embedding": null, "metadata": {"file_path": "monai/data/image_reader.py", "file_name": "image_reader.py", "file_type": "text/x-python", "category": "implementation", "start_line": 226, "end_line": 254, "span_ids": ["ITKReader._get_meta_dict"], "tokens": 256}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ITKReader(ImageReader):\n\n def _get_meta_dict(self, img) -> Dict:\n \"\"\"\n Get all the meta data of the image and convert to dict type.\n\n Args:\n 
img: a ITK image object loaded from a image file.\n\n \"\"\"\n img_meta_dict = img.GetMetaDataDictionary()\n meta_dict = {}\n for key in img_meta_dict.GetKeys():\n # ignore deprecated, legacy members that cause issues\n if key.startswith(\"ITK_original_\"):\n continue\n if (\n key == \"NRRD_measurement frame\"\n and int(itk.Version.GetITKMajorVersion()) == 5\n and int(itk.Version.GetITKMinorVersion()) < 2\n ):\n warnings.warn(\n \"Ignoring 'measurement frame' field. \"\n \"Correct reading of NRRD05 files requires ITK >= 5.2: `pip install --upgrade --pre itk`\"\n )\n continue\n meta_dict[key] = img_meta_dict[key]\n meta_dict[\"origin\"] = np.asarray(img.GetOrigin())\n meta_dict[\"spacing\"] = np.asarray(img.GetSpacing())\n meta_dict[\"direction\"] = itk.array_from_matrix(img.GetDirection())\n return meta_dict", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NibabelReader.read_NibabelReader.read.return.img__if_len_filenames_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NibabelReader.read_NibabelReader.read.return.img__if_len_filenames_", "embedding": null, "metadata": {"file_path": "monai/data/image_reader.py", "file_name": "image_reader.py", "file_type": "text/x-python", "category": "implementation", "start_line": 345, "end_line": 366, "span_ids": ["NibabelReader.read"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class NibabelReader(ImageReader):\n\n def read(self, data: Union[Sequence[str], str], **kwargs):\n \"\"\"\n Read image data from specified file or files.\n Note that the returned object is Nibabel image object or list of Nibabel image objects.\n\n Args:\n data: file name or a list of file names to read.\n kwargs: additional args for `nibabel.load` API, will override `self.kwargs` for existing keys.\n More details about available args:\n https://github.com/nipy/nibabel/blob/master/nibabel/loadsave.py\n\n \"\"\"\n img_: List[Nifti1Image] = []\n\n filenames: Sequence[str] = ensure_tuple(data)\n kwargs_ = self.kwargs.copy()\n kwargs_.update(kwargs)\n for name in filenames:\n img = nib.load(name, **kwargs_)\n img = correct_nifti_header_if_necessary(img)\n img_.append(img)\n return img_ if len(filenames) > 1 else img_[0]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NumpyReader_NumpyReader.verify_suffix.return.is_supported_format_filen": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NumpyReader_NumpyReader.verify_suffix.return.is_supported_format_filen", "embedding": null, "metadata": {"file_path": "monai/data/image_reader.py", "file_name": 
"image_reader.py", "file_type": "text/x-python", "category": "implementation", "start_line": 377, "end_line": 407, "span_ids": ["NumpyReader.__init__", "NumpyReader.verify_suffix", "NumpyReader"], "tokens": 303}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class NumpyReader(ImageReader):\n \"\"\"\n Load NPY or NPZ format data based on Numpy library, they can be arrays or pickled objects.\n A typical usage is to load the `mask` data for classification task.\n It can load part of the npz file with specified `npz_keys`.\n\n Args:\n npz_keys: if loading npz file, only load the specified keys, if None, load all the items.\n stack the loaded items together to construct a new first dimension.\n kwargs: additional args for `numpy.load` API except `allow_pickle`. more details about available args:\n https://numpy.org/doc/stable/reference/generated/numpy.load.html\n\n \"\"\"\n\n def __init__(self, npz_keys: Optional[KeysCollection] = None, **kwargs):\n super().__init__()\n if npz_keys is not None:\n npz_keys = ensure_tuple(npz_keys)\n self.npz_keys = npz_keys\n self.kwargs = kwargs\n\n def verify_suffix(self, filename: Union[Sequence[str], str]) -> bool:\n \"\"\"\n Verify whether the specified file or files format is supported by Numpy reader.\n\n Args:\n filename: file name or a list of file names to read.\n if a list of files, verify all the suffixes.\n \"\"\"\n suffixes: Sequence[str] = [\"npz\", \"npy\"]\n return is_supported_format(filename, suffixes)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NumpyReader.read_NumpyReader.read.return.img__if_len_img__1_els": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_NumpyReader.read_NumpyReader.read.return.img__if_len_img__1_els", "embedding": null, "metadata": {"file_path": "monai/data/image_reader.py", "file_name": "image_reader.py", "file_type": "text/x-python", "category": "implementation", "start_line": 478, "end_line": 505, "span_ids": ["NumpyReader.read"], "tokens": 267}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class NumpyReader(ImageReader):\n\n def read(self, data: Union[Sequence[str], str], **kwargs):\n \"\"\"\n Read image data from specified file or files.\n Note that the returned object is Numpy array or list of Numpy arrays.\n\n Args:\n data: file name or a list of file names to read.\n kwargs: additional args for `numpy.load` API except `allow_pickle`, will override `self.kwargs` for existing keys.\n More details about available args:\n https://numpy.org/doc/stable/reference/generated/numpy.load.html\n\n \"\"\"\n img_: List[Nifti1Image] = []\n\n filenames: Sequence[str] = ensure_tuple(data)\n kwargs_ = 
self.kwargs.copy()\n kwargs_.update(kwargs)\n for name in filenames:\n img = np.load(name, allow_pickle=True, **kwargs_)\n if name.endswith(\".npz\"):\n # load expected items from NPZ file\n npz_keys = [f\"arr_{i}\" for i in range(len(img))] if self.npz_keys is None else self.npz_keys\n for k in npz_keys:\n img_.append(img[k])\n else:\n img_.append(img)\n\n return img_ if len(img_) > 1 else img_[0]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_PILReader.read_PILReader.read.return.img__if_len_filenames_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/image_reader.py_PILReader.read_PILReader.read.return.img__if_len_filenames_", "embedding": null, "metadata": {"file_path": "monai/data/image_reader.py", "file_name": "image_reader.py", "file_type": "text/x-python", "category": "implementation", "start_line": 562, "end_line": 585, "span_ids": ["PILReader.read"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class PILReader(ImageReader):\n\n def read(self, data: Union[Sequence[str], str, np.ndarray], **kwargs):\n \"\"\"\n Read image data from specified file or files.\n Note that the returned object is PIL image or list of PIL image.\n\n Args:\n data: file name or a list of file names to read.\n kwargs: additional args for `Image.open` API in `read()`, will override `self.kwargs` for existing keys.\n Mode details about available args:\n https://pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.open\n\n \"\"\"\n img_: List[PILImage.Image] = []\n\n filenames: Sequence[str] = ensure_tuple(data)\n kwargs_ = self.kwargs.copy()\n kwargs_.update(kwargs)\n for name in filenames:\n img = PILImage.open(name, **kwargs_)\n if callable(self.converter):\n img = self.converter(img)\n img_.append(img)\n\n return img_ if len(filenames) > 1 else img_[0]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_get_random_patch_get_random_patch.return.tuple_slice_mc_mc_ps_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_get_random_patch_get_random_patch.return.tuple_slice_mc_mc_ps_", "embedding": null, "metadata": {"file_path": "monai/data/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 38, "end_line": 60, "span_ids": ["get_random_patch"], "tokens": 268}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "def get_random_patch(\n dims: Sequence[int], patch_size: Sequence[int], rand_state: Optional[np.random.RandomState] = None\n) -> Tuple[slice, ...]:\n \"\"\"\n Returns a tuple of slices to define a random patch in an array of shape `dims` with size `patch_size` or the as\n close to it as possible within the given dimension. It is expected that `patch_size` is a valid patch for a source\n of shape `dims` as returned by `get_valid_patch_size`.\n\n Args:\n dims: shape of source array\n patch_size: shape of patch size to generate\n rand_state: a random state object to generate random numbers from\n\n Returns:\n (tuple of slice): a tuple of slice objects defining the patch\n \"\"\"\n\n # choose the minimal corner of the patch\n rand_int = np.random.randint if rand_state is None else rand_state.randint\n min_corner = tuple(rand_int(0, ms - ps + 1) if ms > ps else 0 for ms, ps in zip(dims, patch_size))\n\n # create the slices for each dimension which define the patch in the source array\n return tuple(slice(mc, mc + ps) for mc, ps in zip(min_corner, patch_size))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_compute_importance_map_compute_importance_map.return.importance_map": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/data/utils.py_compute_importance_map_compute_importance_map.return.importance_map", "embedding": null, "metadata": {"file_path": "monai/data/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 666, "end_line": 718, "span_ids": ["compute_importance_map"], "tokens": 543}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def compute_importance_map(\n patch_size: Tuple[int, ...],\n mode: Union[BlendMode, str] = BlendMode.CONSTANT,\n sigma_scale: Union[Sequence[float], float] = 0.125,\n device: Union[torch.device, int, str] = \"cpu\",\n) -> torch.Tensor:\n \"\"\"Get importance map for different weight modes.\n\n Args:\n patch_size: Size of the required importance map. This should be either H, W [,D].\n mode: {``\"constant\"``, ``\"gaussian\"``}\n How to blend output of overlapping windows. Defaults to ``\"constant\"``.\n\n - ``\"constant``\": gives equal weight to all predictions.\n - ``\"gaussian``\": gives less weight to predictions on edges of windows.\n\n sigma_scale: Sigma_scale to calculate sigma for each dimension\n (sigma = sigma_scale * dim_size). 
Used for gaussian mode only.\n device: Device to put importance map on.\n\n Raises:\n ValueError: When ``mode`` is not one of [\"constant\", \"gaussian\"].\n\n Returns:\n Tensor of size patch_size.\n\n \"\"\"\n mode = BlendMode(mode)\n device = torch.device(device) # type: ignore[arg-type]\n if mode == BlendMode.CONSTANT:\n importance_map = torch.ones(patch_size, device=device).float()\n elif mode == BlendMode.GAUSSIAN:\n center_coords = [i // 2 for i in patch_size]\n sigma_scale = ensure_tuple_rep(sigma_scale, len(patch_size))\n sigmas = [i * sigma_s for i, sigma_s in zip(patch_size, sigma_scale)]\n\n importance_map = torch.zeros(patch_size, device=device)\n importance_map[tuple(center_coords)] = 1\n pt_gaussian = GaussianFilter(len(patch_size), sigmas).to(device=device, dtype=torch.float)\n importance_map = pt_gaussian(importance_map.unsqueeze(0).unsqueeze(0))\n importance_map = importance_map.squeeze(0).squeeze(0)\n importance_map = importance_map / torch.max(importance_map)\n importance_map = importance_map.float()\n\n # importance_map cannot be 0, otherwise we may end up with nans!\n min_non_zero = importance_map[importance_map != 0].min().item()\n importance_map = torch.clamp(importance_map, min=min_non_zero)\n else:\n raise ValueError(\n f\"Unsupported mode: {mode}, available options are [{BlendMode.CONSTANT}, {BlendMode.CONSTANT}].\"\n )\n\n return importance_map", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/smartcache_handler.py_from_typing_import_TYPE_C_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/handlers/smartcache_handler.py_from_typing_import_TYPE_C_", "embedding": null, "metadata": {"file_path": "monai/handlers/smartcache_handler.py", "file_name": "smartcache_handler.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 79, "span_ids": ["SmartCacheHandler.__init__", "SmartCacheHandler.completed", "SmartCacheHandler.started", "SmartCacheHandler.epoch_completed", "SmartCacheHandler.attach", "docstring", "SmartCacheHandler"], "tokens": 506}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import TYPE_CHECKING\n\nfrom monai.data import SmartCacheDataset\nfrom monai.utils import exact_version, optional_import\n\nEvents, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Events\")\nif TYPE_CHECKING:\n from ignite.engine import Engine\nelse:\n Engine, _ = optional_import(\"ignite.engine\", \"0.4.4\", exact_version, \"Engine\")\n\n\nclass SmartCacheHandler:\n \"\"\"\n Attach SmartCache logic to the engine in Ignite.\n Mainly include the `start`, `update_cache`, and `shutdown` functions of SmartCacheDataset.\n\n \"\"\"\n\n def __init__(self, smartcacher: SmartCacheDataset) -> None:\n \"\"\"\n Args:\n smartcacher: predefined SmartCacheDataset, will attach it to the engine.\n\n Raises:\n TypeError: When ``smartcacher`` is not a ``monai.data.SmartCacheDataset``.\n\n \"\"\"\n if not 
isinstance(smartcacher, SmartCacheDataset):\n raise TypeError(\"smartcacher must be a monai.data.SmartCacheDataset.\")\n self.smartcacher = smartcacher\n\n def attach(self, engine: Engine) -> None:\n \"\"\"\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n engine.add_event_handler(Events.STARTED, self.started)\n engine.add_event_handler(Events.EPOCH_COMPLETED, self.epoch_completed)\n engine.add_event_handler(Events.COMPLETED, self.completed)\n\n def started(self, engine: Engine) -> None:\n \"\"\"Callback for train or validation/evaluation started Event.\n Start the replacement thread of SmartCacheDataset.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n self.smartcacher.start()\n\n def epoch_completed(self, engine: Engine) -> None:\n \"\"\"Callback for train or validation/evaluation epoch completed Event.\n Update cache content with replacement data.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n self.smartcacher.update_cache()\n\n def completed(self, engine: Engine) -> None:\n \"\"\"Callback for train or validation/evaluation completed Event.\n Stop the replacement thread of SmartCacheDataset.\n\n Args:\n engine: Ignite Engine, it can be a trainer, validator or evaluator.\n \"\"\"\n self.smartcacher.shutdown()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/hausdorff_distance.py_compute_percent_hausdorff_distance_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/hausdorff_distance.py_compute_percent_hausdorff_distance_", "embedding": null, "metadata": {"file_path": "monai/metrics/hausdorff_distance.py", "file_name": "hausdorff_distance.py", "file_type": "text/x-python", "category": "implementation", "start_line": 151, "end_line": 173, "span_ids": ["compute_percent_hausdorff_distance"], "tokens": 164}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def compute_percent_hausdorff_distance(\n edges_pred: np.ndarray,\n edges_gt: np.ndarray,\n distance_metric: str = \"euclidean\",\n percentile: Optional[float] = None,\n):\n \"\"\"\n This function is used to compute the directed Hausdorff distance.\n \"\"\"\n\n surface_distance = get_surface_distance(edges_pred, edges_gt, distance_metric=distance_metric)\n\n # for both pred and gt do not have foreground\n if surface_distance.shape == (0,):\n return np.nan\n\n if not percentile:\n return surface_distance.max()\n\n if 0 <= percentile <= 100:\n return np.percentile(surface_distance, percentile)\n raise ValueError(f\"percentile should be a value between 0 and 100, get {percentile}.\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/utils.py_get_mask_edges_get_mask_edges.return._edges_pred_edges_gt_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/utils.py_get_mask_edges_get_mask_edges.return._edges_pred_edges_gt_", "embedding": null, "metadata": {"file_path": "monai/metrics/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 106, "end_line": 168, "span_ids": ["get_mask_edges"], "tokens": 621}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_mask_edges(\n seg_pred: Union[np.ndarray, torch.Tensor],\n seg_gt: Union[np.ndarray, torch.Tensor],\n label_idx: int = 1,\n crop: bool = True,\n) -> Tuple[np.ndarray, np.ndarray]:\n \"\"\"\n Do binary erosion and use XOR for input to get the edges. This\n function is helpful to further calculate metrics such as Average Surface\n Distance and Hausdorff Distance.\n The input images can be binary or labelfield images. If labelfield images\n are supplied, they are converted to binary images using `label_idx`.\n\n `scipy`'s binary erosion is used to to calculate the edges of the binary\n labelfield.\n\n In order to improve the computing efficiency, before getting the edges,\n the images can be cropped and only keep the foreground if not specifies\n ``crop = False``.\n\n We require that images are the same size, and assume that they occupy the\n same space (spacing, orientation, etc.).\n\n Args:\n seg_pred: the predicted binary or labelfield image.\n seg_gt: the actual binary or labelfield image.\n label_idx: for labelfield images, convert to binary with\n `seg_pred = seg_pred == label_idx`.\n crop: crop input images and only keep the foregrounds. In order to\n maintain two inputs' shapes, here the bounding box is achieved\n by ``(seg_pred | seg_gt)`` which represents the union set of two\n images. 
Defaults to ``True``.\n \"\"\"\n\n # Get both labelfields as np arrays\n if isinstance(seg_pred, torch.Tensor):\n seg_pred = seg_pred.detach().cpu().numpy()\n if isinstance(seg_gt, torch.Tensor):\n seg_gt = seg_gt.detach().cpu().numpy()\n\n if seg_pred.shape != seg_gt.shape:\n raise ValueError(\"seg_pred and seg_gt should have same shapes.\")\n\n # If not binary images, convert them\n if seg_pred.dtype != bool:\n seg_pred = seg_pred == label_idx\n if seg_gt.dtype != bool:\n seg_gt = seg_gt == label_idx\n\n if crop:\n if not np.any(seg_pred | seg_gt):\n return (np.zeros_like(seg_pred), np.zeros_like(seg_gt))\n\n seg_pred, seg_gt = np.expand_dims(seg_pred, 0), np.expand_dims(seg_gt, 0)\n box_start, box_end = generate_spatial_bounding_box(np.asarray(seg_pred | seg_gt))\n cropper = SpatialCrop(roi_start=box_start, roi_end=box_end)\n seg_pred, seg_gt = np.squeeze(cropper(seg_pred)), np.squeeze(cropper(seg_gt))\n\n # Do binary erosion and use XOR to get edges\n edges_pred = binary_erosion(seg_pred) ^ seg_pred\n edges_gt = binary_erosion(seg_gt) ^ seg_gt\n\n return (edges_pred, edges_gt)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/utils.py_get_surface_distance_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/metrics/utils.py_get_surface_distance_", "embedding": null, "metadata": {"file_path": "monai/metrics/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 171, "end_line": 204, "span_ids": ["get_surface_distance"], "tokens": 318}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_surface_distance(\n seg_pred: np.ndarray,\n seg_gt: np.ndarray,\n distance_metric: str = \"euclidean\",\n) -> np.ndarray:\n \"\"\"\n This function is used to compute the surface distances from `seg_pred` to `seg_gt`.\n\n Args:\n seg_pred: the edge of the predictions.\n seg_gt: the edge of the ground truth.\n distance_metric: : [``\"euclidean\"``, ``\"chessboard\"``, ``\"taxicab\"``]\n the metric used to compute surface distance. 
Defaults to ``\"euclidean\"``.\n\n - ``\"euclidean\"``, uses Exact Euclidean distance transform.\n - ``\"chessboard\"``, uses `chessboard` metric in chamfer type of transform.\n - ``\"taxicab\"``, uses `taxicab` metric in chamfer type of transform.\n \"\"\"\n\n if not np.any(seg_gt):\n dis = np.inf * np.ones_like(seg_gt)\n else:\n if not np.any(seg_pred):\n dis = np.inf * np.ones_like(seg_gt)\n return np.asarray(dis[seg_gt])\n if distance_metric == \"euclidean\":\n dis = distance_transform_edt(~seg_gt)\n elif distance_metric in [\"chessboard\", \"taxicab\"]:\n dis = distance_transform_cdt(~seg_gt, metric=distance_metric)\n else:\n raise ValueError(f\"distance_metric {distance_metric} is not implemented.\")\n\n return np.asarray(dis[seg_pred])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_from_typing_import_Dict__UnetResBlock.forward.return.out": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_from_typing_import_Dict__UnetResBlock.forward.return.out", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/dynunet_block.py", "file_name": "dynunet_block.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 94, "span_ids": ["UnetResBlock.forward", "UnetResBlock.__init__", "UnetResBlock", "docstring"], "tokens": 663}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Dict, Optional, Sequence, Tuple, Union\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\n\nfrom monai.networks.blocks.convolutions import Convolution\nfrom monai.networks.layers.factories import Act, Norm, split_args\n\n\nclass UnetResBlock(nn.Module):\n \"\"\"\n A skip-connection based module that can be used for DynUNet, based on:\n `Automated Design of Deep Learning Methods for Biomedical Image Segmentation `_.\n `nnU-Net: Self-adapting Framework for U-Net-Based Medical Image Segmentation `_.\n\n Args:\n spatial_dims: number of spatial dimensions.\n in_channels: number of input channels.\n out_channels: number of output channels.\n kernel_size: convolution kernel size.\n stride: convolution stride.\n norm_name: [``\"batch\"``, ``\"instance\"``, ``\"group\"``]\n feature normalization type and arguments. 
In this module, if using ``\"group\"``,\n `in_channels` should be divisible by 16 (default value for ``num_groups``).\n \"\"\"\n\n def __init__(\n self,\n spatial_dims: int,\n in_channels: int,\n out_channels: int,\n kernel_size: Union[Sequence[int], int],\n stride: Union[Sequence[int], int],\n norm_name: str,\n ):\n super(UnetResBlock, self).__init__()\n self.conv1 = get_conv_layer(\n spatial_dims,\n in_channels,\n out_channels,\n kernel_size=kernel_size,\n stride=stride,\n conv_only=True,\n )\n self.conv2 = get_conv_layer(\n spatial_dims,\n out_channels,\n out_channels,\n kernel_size=kernel_size,\n stride=1,\n conv_only=True,\n )\n self.conv3 = get_conv_layer(\n spatial_dims,\n in_channels,\n out_channels,\n kernel_size=1,\n stride=stride,\n conv_only=True,\n )\n self.lrelu = get_acti_layer((\"leakyrelu\", {\"inplace\": True, \"negative_slope\": 0.01}))\n self.norm1 = get_norm_layer(spatial_dims, out_channels, norm_name)\n self.norm2 = get_norm_layer(spatial_dims, out_channels, norm_name)\n self.norm3 = get_norm_layer(spatial_dims, out_channels, norm_name)\n self.downsample = in_channels != out_channels\n stride_np = np.atleast_1d(stride)\n if not np.all(stride_np == 1):\n self.downsample = True\n\n def forward(self, inp):\n residual = inp\n out = self.conv1(inp)\n out = self.norm1(out)\n out = self.lrelu(out)\n out = self.conv2(out)\n out = self.norm2(out)\n if self.downsample:\n residual = self.conv3(residual)\n residual = self.norm3(residual)\n out += residual\n out = self.lrelu(out)\n return out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_UnetBasicBlock_UnetBasicBlock.forward.return.out": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_UnetBasicBlock_UnetBasicBlock.forward.return.out", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/dynunet_block.py", "file_name": "dynunet_block.py", "file_type": "text/x-python", "category": "implementation", "start_line": 97, "end_line": 151, "span_ids": ["UnetBasicBlock", "UnetBasicBlock.forward", "UnetBasicBlock.__init__"], "tokens": 471}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class UnetBasicBlock(nn.Module):\n \"\"\"\n A CNN module module that can be used for DynUNet, based on:\n `Automated Design of Deep Learning Methods for Biomedical Image Segmentation `_.\n `nnU-Net: Self-adapting Framework for U-Net-Based Medical Image Segmentation `_.\n\n Args:\n spatial_dims: number of spatial dimensions.\n in_channels: number of input channels.\n out_channels: number of output channels.\n kernel_size: convolution kernel size.\n stride: convolution stride.\n norm_name: [``\"batch\"``, ``\"instance\"``, ``\"group\"``]\n feature normalization type and arguments. 
In this module, if using ``\"group\"``,\n `in_channels` should be divisible by 16 (default value for ``num_groups``).\n \"\"\"\n\n def __init__(\n self,\n spatial_dims: int,\n in_channels: int,\n out_channels: int,\n kernel_size: Union[Sequence[int], int],\n stride: Union[Sequence[int], int],\n norm_name: str,\n ):\n super(UnetBasicBlock, self).__init__()\n self.conv1 = get_conv_layer(\n spatial_dims,\n in_channels,\n out_channels,\n kernel_size=kernel_size,\n stride=stride,\n conv_only=True,\n )\n self.conv2 = get_conv_layer(\n spatial_dims,\n out_channels,\n out_channels,\n kernel_size=kernel_size,\n stride=1,\n conv_only=True,\n )\n self.lrelu = get_acti_layer((\"leakyrelu\", {\"inplace\": True, \"negative_slope\": 0.01}))\n self.norm1 = get_norm_layer(spatial_dims, out_channels, norm_name)\n self.norm2 = get_norm_layer(spatial_dims, out_channels, norm_name)\n\n def forward(self, inp):\n out = self.conv1(inp)\n out = self.norm1(out)\n out = self.lrelu(out)\n out = self.conv2(out)\n out = self.norm2(out)\n out = self.lrelu(out)\n return out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_UnetUpBlock_UnetUpBlock.forward.return.out": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_UnetUpBlock_UnetUpBlock.forward.return.out", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/dynunet_block.py", "file_name": "dynunet_block.py", "file_type": "text/x-python", "category": "implementation", "start_line": 154, "end_line": 207, "span_ids": ["UnetUpBlock.__init__", "UnetUpBlock.forward", "UnetUpBlock"], "tokens": 459}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class UnetUpBlock(nn.Module):\n \"\"\"\n An upsampling module that can be used for DynUNet, based on:\n `Automated Design of Deep Learning Methods for Biomedical Image Segmentation `_.\n `nnU-Net: Self-adapting Framework for U-Net-Based Medical Image Segmentation `_.\n\n Args:\n spatial_dims: number of spatial dimensions.\n in_channels: number of input channels.\n out_channels: number of output channels.\n kernel_size: convolution kernel size.\n stride: convolution stride.\n upsample_kernel_size: convolution kernel size for transposed convolution layers.\n norm_name: [``\"batch\"``, ``\"instance\"``, ``\"group\"``]\n feature normalization type and arguments. 
In this module, if using ``\"group\"``,\n `in_channels` should be divisible by 16 (default value for ``num_groups``).\n \"\"\"\n\n def __init__(\n self,\n spatial_dims: int,\n in_channels: int,\n out_channels: int,\n kernel_size: Union[Sequence[int], int],\n stride: Union[Sequence[int], int],\n upsample_kernel_size: Union[Sequence[int], int],\n norm_name: str,\n ):\n super(UnetUpBlock, self).__init__()\n upsample_stride = upsample_kernel_size\n self.transp_conv = get_conv_layer(\n spatial_dims,\n in_channels,\n out_channels,\n kernel_size=upsample_kernel_size,\n stride=upsample_stride,\n conv_only=True,\n is_transposed=True,\n )\n self.conv_block = UnetBasicBlock(\n spatial_dims,\n out_channels + out_channels,\n out_channels,\n kernel_size=kernel_size,\n stride=1,\n norm_name=norm_name,\n )\n\n def forward(self, inp, skip):\n # number of channels for skip should equals to out_channels\n out = self.transp_conv(inp)\n out = torch.cat((out, skip), dim=1)\n out = self.conv_block(out)\n return out", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_UnetOutBlock_get_acti_layer.return.act_type_act_args_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_UnetOutBlock_get_acti_layer.return.act_type_act_args_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/dynunet_block.py", "file_name": "dynunet_block.py", "file_type": "text/x-python", "category": "implementation", "start_line": 210, "end_line": 225, "span_ids": ["get_acti_layer", "UnetOutBlock.forward", "UnetOutBlock", "UnetOutBlock.__init__"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class UnetOutBlock(nn.Module):\n def __init__(self, spatial_dims: int, in_channels: int, out_channels: int):\n super(UnetOutBlock, self).__init__()\n self.conv = get_conv_layer(\n spatial_dims, in_channels, out_channels, kernel_size=1, stride=1, bias=True, conv_only=True\n )\n\n def forward(self, inp):\n out = self.conv(inp)\n return out\n\n\ndef get_acti_layer(act: Union[Tuple[str, Dict], str]):\n act_name, act_args = split_args(act)\n act_type = Act[act_name]\n return act_type(**act_args)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_get_conv_layer_get_conv_layer.return.Convolution_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_get_conv_layer_get_conv_layer.return.Convolution_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/dynunet_block.py", "file_name": "dynunet_block.py", "file_type": "text/x-python", "category": "implementation", "start_line": 
240, "end_line": 269, "span_ids": ["get_conv_layer"], "tokens": 215}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_conv_layer(\n spatial_dims: int,\n in_channels: int,\n out_channels: int,\n kernel_size: Union[Sequence[int], int] = 3,\n stride: Union[Sequence[int], int] = 1,\n act: Optional[Union[Tuple, str]] = Act.PRELU,\n norm: Union[Tuple, str] = Norm.INSTANCE,\n bias: bool = False,\n conv_only: bool = True,\n is_transposed: bool = False,\n):\n padding = get_padding(kernel_size, stride)\n output_padding = None\n if is_transposed:\n output_padding = get_output_padding(kernel_size, stride, padding)\n return Convolution(\n spatial_dims,\n in_channels,\n out_channels,\n strides=stride,\n kernel_size=kernel_size,\n act=act,\n norm=norm,\n bias=bias,\n conv_only=conv_only,\n is_transposed=is_transposed,\n padding=padding,\n output_padding=output_padding,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_get_padding_get_padding.return.padding_if_len_padding_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_get_padding_get_padding.return.padding_if_len_padding_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/dynunet_block.py", "file_name": "dynunet_block.py", "file_type": "text/x-python", "category": "implementation", "start_line": 272, "end_line": 284, "span_ids": ["get_padding"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_padding(\n kernel_size: Union[Sequence[int], int],\n stride: Union[Sequence[int], int],\n) -> Union[Tuple[int, ...], int]:\n\n kernel_size_np = np.atleast_1d(kernel_size)\n stride_np = np.atleast_1d(stride)\n padding_np = (kernel_size_np - stride_np + 1) / 2\n if np.min(padding_np) < 0:\n raise AssertionError(\"padding value should not be negative, please change the kernel size and/or stride.\")\n padding = tuple(int(p) for p in padding_np)\n\n return padding if len(padding) > 1 else padding[0]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_get_output_padding_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/dynunet_block.py_get_output_padding_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/dynunet_block.py", "file_name": "dynunet_block.py", "file_type": "text/x-python", 
"category": "implementation", "start_line": 287, "end_line": 302, "span_ids": ["get_output_padding"], "tokens": 170}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_output_padding(\n kernel_size: Union[Sequence[int], int],\n stride: Union[Sequence[int], int],\n padding: Union[Sequence[int], int],\n) -> Union[Tuple[int, ...], int]:\n kernel_size_np = np.atleast_1d(kernel_size)\n stride_np = np.atleast_1d(stride)\n padding_np = np.atleast_1d(padding)\n\n out_padding_np = 2 * padding_np + stride_np - kernel_size_np\n if np.min(out_padding_np) < 0:\n raise AssertionError(\"out_padding value should not be negative, please change the kernel size and/or stride.\")\n out_padding = tuple(int(p) for p in out_padding_np)\n\n return out_padding if len(out_padding) > 1 else out_padding[0]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/segresnet_block.py_get_conv_layer_get_upsample_layer.return.UpSample_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/segresnet_block.py_get_conv_layer_get_upsample_layer.return.UpSample_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/segresnet_block.py", "file_name": "segresnet_block.py", "file_type": "text/x-python", "category": "implementation", "start_line": 37, "end_line": 63, "span_ids": ["get_upsample_layer", "get_conv_layer"], "tokens": 183}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_conv_layer(\n spatial_dims: int, in_channels: int, out_channels: int, kernel_size: int = 3, stride: int = 1, bias: bool = False\n):\n\n return Convolution(\n spatial_dims,\n in_channels,\n out_channels,\n strides=stride,\n kernel_size=kernel_size,\n bias=bias,\n conv_only=True,\n )\n\n\ndef get_upsample_layer(\n spatial_dims: int, in_channels: int, upsample_mode: Union[UpsampleMode, str] = \"nontrainable\", scale_factor: int = 2\n):\n return UpSample(\n dimensions=spatial_dims,\n in_channels=in_channels,\n out_channels=in_channels,\n scale_factor=scale_factor,\n mode=upsample_mode,\n interp_mode=InterpolateMode.LINEAR,\n align_corners=False,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_from_typing_import_Option_UpSample._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_from_typing_import_Option_UpSample._", "embedding": null, "metadata": 
{"file_path": "monai/networks/blocks/upsample.py", "file_name": "upsample.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 35, "span_ids": ["UpSample", "docstring"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Optional, Sequence, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom monai.networks.layers.factories import Conv, Pad, Pool\nfrom monai.networks.utils import icnr_init, pixelshuffle\nfrom monai.utils import InterpolateMode, UpsampleMode, ensure_tuple_rep\n\n__all__ = [\"Upsample\", \"UpSample\", \"SubpixelUpsample\", \"Subpixelupsample\", \"SubpixelUpSample\"]\n\n\nclass UpSample(nn.Sequential):\n \"\"\"\n Upsamples data by `scale_factor`.\n Supported modes are:\n\n - \"deconv\": uses a transposed convolution.\n - \"nontrainable\": uses :py:class:`torch.nn.Upsample`.\n - \"pixelshuffle\": uses :py:class:`monai.networks.blocks.SubpixelUpsample`.\n\n This module can optionally take a pre-convolution\n (often used to map the number of features from `in_channels` to `out_channels`).\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_UpSample.__init___UpSample.__init__.if_up_mode_UpsampleMod.else_.raise_NotImplementedError": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_UpSample.__init___UpSample.__init__.if_up_mode_UpsampleMod.else_.raise_NotImplementedError", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/upsample.py", "file_name": "upsample.py", "file_type": "text/x-python", "category": "implementation", "start_line": 35, "end_line": 122, "span_ids": ["UpSample.__init__"], "tokens": 1020}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class UpSample(nn.Sequential):\n\n def __init__(\n self,\n dimensions: int,\n in_channels: Optional[int] = None,\n out_channels: Optional[int] = None,\n scale_factor: Union[Sequence[float], float] = 2,\n mode: Union[UpsampleMode, str] = UpsampleMode.DECONV,\n pre_conv: Optional[Union[nn.Module, str]] = \"default\",\n interp_mode: Union[InterpolateMode, str] = InterpolateMode.LINEAR,\n align_corners: Optional[bool] = True,\n bias: bool = True,\n apply_pad_pool: bool = True,\n ) -> None:\n \"\"\"\n Args:\n dimensions: number of spatial dimensions of the input image.\n in_channels: number of channels of the input image.\n out_channels: number of channels of the output image. Defaults to `in_channels`.\n scale_factor: multiplier for spatial size. Has to match input size if it is a tuple. Defaults to 2.\n mode: {``\"deconv\"``, ``\"nontrainable\"``, ``\"pixelshuffle\"``}. 
Defaults to ``\"deconv\"``.\n pre_conv: a conv block applied before upsampling. Defaults to None.\n When ``conv_block`` is ``\"default\"``, one reserved conv layer will be utilized when\n Only used in the \"nontrainable\" or \"pixelshuffle\" mode.\n interp_mode: {``\"nearest\"``, ``\"linear\"``, ``\"bilinear\"``, ``\"bicubic\"``, ``\"trilinear\"``}\n Only used when ``mode`` is ``UpsampleMode.NONTRAINABLE``.\n If ends with ``\"linear\"`` will use ``spatial dims`` to determine the correct interpolation.\n This corresponds to linear, bilinear, trilinear for 1D, 2D, and 3D respectively.\n The interpolation mode. Defaults to ``\"linear\"``.\n See also: https://pytorch.org/docs/stable/nn.html#upsample\n align_corners: set the align_corners parameter of `torch.nn.Upsample`. Defaults to True.\n Only used in the nontrainable mode.\n bias: whether to have a bias term in the default preconv and deconv layers. Defaults to True.\n apply_pad_pool: if True the upsampled tensor is padded then average pooling is applied with a kernel the\n size of `scale_factor` with a stride of 1. See also: :py:class:`monai.networks.blocks.SubpixelUpsample`.\n Only used in the pixelshuffle mode.\n \"\"\"\n super().__init__()\n scale_factor_ = ensure_tuple_rep(scale_factor, dimensions)\n up_mode = UpsampleMode(mode)\n if up_mode == UpsampleMode.DECONV:\n if not in_channels:\n raise ValueError(f\"in_channels needs to be specified in the '{mode}' mode.\")\n self.add_module(\n \"deconv\",\n Conv[Conv.CONVTRANS, dimensions](\n in_channels=in_channels,\n out_channels=out_channels or in_channels,\n kernel_size=scale_factor_,\n stride=scale_factor_,\n bias=bias,\n ),\n )\n elif up_mode == UpsampleMode.NONTRAINABLE:\n if pre_conv == \"default\" and (out_channels != in_channels): # defaults to no conv if out_chns==in_chns\n if not in_channels:\n raise ValueError(f\"in_channels needs to be specified in the '{mode}' mode.\")\n self.add_module(\n \"preconv\",\n Conv[Conv.CONV, dimensions](\n in_channels=in_channels, out_channels=out_channels or in_channels, kernel_size=1, bias=bias\n ),\n )\n elif pre_conv is not None and pre_conv != \"default\":\n self.add_module(\"preconv\", pre_conv) # type: ignore\n\n interp_mode = InterpolateMode(interp_mode)\n linear_mode = [InterpolateMode.LINEAR, InterpolateMode.BILINEAR, InterpolateMode.TRILINEAR]\n if interp_mode in linear_mode: # choose mode based on dimensions\n interp_mode = linear_mode[dimensions - 1]\n self.add_module(\n \"upsample_non_trainable\",\n nn.Upsample(scale_factor=scale_factor_, mode=interp_mode.value, align_corners=align_corners),\n )\n elif up_mode == UpsampleMode.PIXELSHUFFLE:\n self.add_module(\n \"pixelshuffle\",\n SubpixelUpsample(\n dimensions=dimensions,\n in_channels=in_channels,\n out_channels=out_channels,\n scale_factor=scale_factor_[0], # isotropic\n conv_block=pre_conv,\n apply_pad_pool=apply_pad_pool,\n bias=bias,\n ),\n )\n else:\n raise NotImplementedError(f\"Unsupported upsampling mode {mode}.\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_SubpixelUpsample_SubpixelUpsample._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_SubpixelUpsample_SubpixelUpsample._", "embedding": 
null, "metadata": {"file_path": "monai/networks/blocks/upsample.py", "file_name": "upsample.py", "file_type": "text/x-python", "category": "implementation", "start_line": 125, "end_line": 148, "span_ids": ["SubpixelUpsample"], "tokens": 265}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SubpixelUpsample(nn.Module):\n \"\"\"\n Upsample via using a subpixel CNN. This module supports 1D, 2D and 3D input images.\n The module is consisted with two parts. First of all, a convolutional layer is employed\n to increase the number of channels into: ``in_channels * (scale_factor ** dimensions)``.\n Secondly, a pixel shuffle manipulation is utilized to aggregates the feature maps from\n low resolution space and build the super resolution space.\n The first part of the module is not fixed, a sequential layers can be used to replace the\n default single layer.\n\n See: Shi et al., 2016, \"Real-Time Single Image and Video Super-Resolution\n Using a nEfficient Sub-Pixel Convolutional Neural Network.\"\n\n See: Aitken et al., 2017, \"Checkerboard artifact free sub-pixel convolution\".\n\n The idea comes from:\n https://arxiv.org/abs/1609.05158\n\n The pixel shuffle mechanism refers to:\n https://github.com/pytorch/pytorch/blob/master/aten/src/ATen/native/PixelShuffle.cpp\n and:\n https://github.com/pytorch/pytorch/pull/6340/files\n\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_SubpixelUpsample.__init___SubpixelUpsample.__init__.if_apply_pad_pool_.self.pad_pool.nn_Sequential_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_SubpixelUpsample.__init___SubpixelUpsample.__init__.if_apply_pad_pool_.self.pad_pool.nn_Sequential_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/upsample.py", "file_name": "upsample.py", "file_type": "text/x-python", "category": "implementation", "start_line": 150, "end_line": 209, "span_ids": ["SubpixelUpsample.__init__"], "tokens": 572}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SubpixelUpsample(nn.Module):\n\n def __init__(\n self,\n dimensions: int,\n in_channels: Optional[int],\n out_channels: Optional[int] = None,\n scale_factor: int = 2,\n conv_block: Optional[Union[nn.Module, str]] = \"default\",\n apply_pad_pool: bool = True,\n bias: bool = True,\n ) -> None:\n \"\"\"\n Args:\n dimensions: number of spatial dimensions of the input image.\n in_channels: number of channels of the input image.\n out_channels: optional number of channels of the output image.\n scale_factor: multiplier for spatial size. Defaults to 2.\n conv_block: a conv block to extract feature maps before upsampling. 
Defaults to None.\n\n - When ``conv_block`` is ``\"default\"``, one reserved conv layer will be utilized.\n - When ``conv_block`` is an ``nn.module``,\n please ensure the output number of channels is divisible ``(scale_factor ** dimensions)``.\n\n apply_pad_pool: if True the upsampled tensor is padded then average pooling is applied with a kernel the\n size of `scale_factor` with a stride of 1. This implements the nearest neighbour resize convolution\n component of subpixel convolutions described in Aitken et al.\n bias: whether to have a bias term in the default conv_block. Defaults to True.\n \"\"\"\n super().__init__()\n\n if scale_factor <= 0:\n raise ValueError(f\"The `scale_factor` multiplier must be an integer greater than 0, got {scale_factor}.\")\n\n self.dimensions = dimensions\n self.scale_factor = scale_factor\n\n if conv_block == \"default\":\n out_channels = out_channels or in_channels\n if not out_channels:\n raise ValueError(\"in_channels need to be specified.\")\n conv_out_channels = out_channels * (scale_factor ** dimensions)\n self.conv_block = Conv[Conv.CONV, dimensions](\n in_channels=in_channels, out_channels=conv_out_channels, kernel_size=3, stride=1, padding=1, bias=bias\n )\n\n icnr_init(self.conv_block, self.scale_factor)\n elif conv_block is None:\n self.conv_block = nn.Identity()\n else:\n self.conv_block = conv_block\n\n self.pad_pool: nn.Module = nn.Identity()\n\n if apply_pad_pool:\n pool_type = Pool[Pool.AVG, self.dimensions]\n pad_type = Pad[Pad.CONSTANTPAD, self.dimensions]\n\n self.pad_pool = nn.Sequential(\n pad_type(padding=(self.scale_factor - 1, 0) * self.dimensions, value=0.0),\n pool_type(kernel_size=self.scale_factor, stride=1),\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_SubpixelUpsample.forward_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/blocks/upsample.py_SubpixelUpsample.forward_", "embedding": null, "metadata": {"file_path": "monai/networks/blocks/upsample.py", "file_name": "upsample.py", "file_type": "text/x-python", "category": "implementation", "start_line": 213, "end_line": 232, "span_ids": ["SubpixelUpsample.forward", "impl:3"], "tokens": 183}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class SubpixelUpsample(nn.Module):\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n x: Tensor in shape (batch, channel, spatial_1[, spatial_2, ...).\n \"\"\"\n x = self.conv_block(x)\n if x.shape[1] % (self.scale_factor ** self.dimensions) != 0:\n raise ValueError(\n f\"Number of channels after `conv_block` ({x.shape[1]}) must be evenly \"\n \"divisible by scale_factor ** dimensions \"\n f\"({self.scale_factor}^{self.dimensions}={self.scale_factor**self.dimensions}).\"\n )\n x = pixelshuffle(x, self.dimensions, self.scale_factor)\n x = self.pad_pool(x)\n return x\n\n\nUpsample = UpSample\nSubpixelupsample = SubpixelUpSample = SubpixelUpsample", "start_char_idx": null, 
"end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_from_typing_import_List__same_padding.return.padding_if_len_padding_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_from_typing_import_List__same_padding.return.padding_if_len_padding_", "embedding": null, "metadata": {"file_path": "monai/networks/layers/convutils.py", "file_name": "convutils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 43, "span_ids": ["same_padding", "docstring"], "tokens": 295}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import List, Optional, Sequence, Tuple, Union\n\nimport numpy as np\nimport torch\n\n__all__ = [\"same_padding\", \"stride_minus_kernel_padding\", \"calculate_out_shape\", \"gaussian_1d\", \"polyval\"]\n\n\ndef same_padding(\n kernel_size: Union[Sequence[int], int], dilation: Union[Sequence[int], int] = 1\n) -> Union[Tuple[int, ...], int]:\n \"\"\"\n Return the padding value needed to ensure a convolution using the given kernel size produces an output of the same\n shape as the input for a stride of 1, otherwise ensure a shape of the input divided by the stride rounded down.\n\n Raises:\n NotImplementedError: When ``np.any((kernel_size - 1) * dilation % 2 == 1)``.\n\n \"\"\"\n\n kernel_size_np = np.atleast_1d(kernel_size)\n dilation_np = np.atleast_1d(dilation)\n\n if np.any((kernel_size_np - 1) * dilation % 2 == 1):\n raise NotImplementedError(\n f\"Same padding not available for kernel_size={kernel_size_np} and dilation={dilation_np}.\"\n )\n\n padding_np = (kernel_size_np - 1) / 2 * dilation_np\n padding = tuple(int(p) for p in padding_np)\n\n return padding if len(padding) > 1 else padding[0]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_stride_minus_kernel_padding_stride_minus_kernel_padding.return.out_padding_if_len_out_pa": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_stride_minus_kernel_padding_stride_minus_kernel_padding.return.out_padding_if_len_out_pa", "embedding": null, "metadata": {"file_path": "monai/networks/layers/convutils.py", "file_name": "convutils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 46, "end_line": 56, "span_ids": ["stride_minus_kernel_padding"], "tokens": 110}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "def stride_minus_kernel_padding(\n kernel_size: Union[Sequence[int], int],\n stride: Union[Sequence[int], int],\n) -> Union[Tuple[int, ...], int]:\n kernel_size_np = np.atleast_1d(kernel_size)\n stride_np = np.atleast_1d(stride)\n\n out_padding_np = stride_np - kernel_size_np\n out_padding = tuple(int(p) for p in out_padding_np)\n\n return out_padding if len(out_padding) > 1 else out_padding[0]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_gaussian_1d_gaussian_1d._type_ignore": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_gaussian_1d_gaussian_1d._type_ignore", "embedding": null, "metadata": {"file_path": "monai/networks/layers/convutils.py", "file_name": "convutils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 81, "end_line": 134, "span_ids": ["gaussian_1d"], "tokens": 608}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def gaussian_1d(\n sigma: torch.Tensor, truncated: float = 4.0, approx: str = \"erf\", normalize: bool = False\n) -> torch.Tensor:\n \"\"\"\n one dimensional Gaussian kernel.\n\n Args:\n sigma: std of the kernel\n truncated: tail length\n approx: discrete Gaussian kernel type, available options are \"erf\", \"sampled\", and \"scalespace\".\n\n - ``erf`` approximation interpolates the error function;\n - ``sampled`` uses a sampled Gaussian kernel;\n - ``scalespace`` corresponds to\n https://en.wikipedia.org/wiki/Scale_space_implementation#The_discrete_Gaussian_kernel\n based on the modified Bessel functions.\n\n normalize: whether to normalize the kernel with `kernel.sum()`.\n\n Raises:\n ValueError: When ``truncated`` is non-positive.\n\n Returns:\n 1D torch tensor\n\n \"\"\"\n sigma = torch.as_tensor(sigma, dtype=torch.float, device=sigma.device if isinstance(sigma, torch.Tensor) else None)\n device = sigma.device\n if truncated <= 0.0:\n raise ValueError(f\"truncated must be positive, got {truncated}.\")\n tail = int(max(float(sigma) * truncated, 0.5) + 0.5)\n if approx.lower() == \"erf\":\n x = torch.arange(-tail, tail + 1, dtype=torch.float, device=device)\n t = 0.70710678 / torch.abs(sigma)\n out = 0.5 * ((t * (x + 0.5)).erf() - (t * (x - 0.5)).erf())\n out = out.clamp(min=0)\n elif approx.lower() == \"sampled\":\n x = torch.arange(-tail, tail + 1, dtype=torch.float, device=sigma.device)\n out = torch.exp(-0.5 / (sigma * sigma) * x ** 2)\n if not normalize: # compute the normalizer\n out = out / (2.5066282 * sigma)\n elif approx.lower() == \"scalespace\":\n sigma2 = sigma * sigma\n out_pos: List[Optional[torch.Tensor]] = [None] * (tail + 1)\n out_pos[0] = _modified_bessel_0(sigma2)\n out_pos[1] = _modified_bessel_1(sigma2)\n for k in range(2, len(out_pos)):\n out_pos[k] = _modified_bessel_i(k, sigma2)\n out = out_pos[:0:-1]\n out.extend(out_pos)\n out = torch.stack(out) * torch.exp(-sigma2)\n else:\n raise NotImplementedError(f\"Unsupported 
option: approx='{approx}'.\")\n return out / out.sum() if normalize else out # type: ignore", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_polyval_polyval._type_ignore": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py_polyval_polyval._type_ignore", "embedding": null, "metadata": {"file_path": "monai/networks/layers/convutils.py", "file_name": "convutils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 137, "end_line": 160, "span_ids": ["polyval"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def polyval(coef, x) -> torch.Tensor:\n \"\"\"\n Evaluates the polynomial defined by `coef` at `x`.\n\n For a 1D sequence of coef (length n), evaluate::\n\n y = coef[n-1] + x * (coef[n-2] + ... + x * (coef[1] + x * coef[0]))\n\n Args:\n coef: a sequence of floats representing the coefficients of the polynomial\n x: float or a sequence of floats representing the variable of the polynomial\n\n Returns:\n 1D torch tensor\n \"\"\"\n device = x.device if isinstance(x, torch.Tensor) else None\n coef = torch.as_tensor(coef, dtype=torch.float, device=device)\n if coef.ndim == 0 or (len(coef) < 1):\n return torch.zeros(x.shape)\n x = torch.as_tensor(x, dtype=torch.float, device=device)\n ans = coef[0]\n for c in coef[1:]:\n ans = ans * x + c\n return ans # type: ignore", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py__modified_bessel_0__modified_bessel_0.return.polyval__coef_y_torch": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py__modified_bessel_0__modified_bessel_0.return.polyval__coef_y_torch", "embedding": null, "metadata": {"file_path": "monai/networks/layers/convutils.py", "file_name": "convutils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 163, "end_line": 181, "span_ids": ["_modified_bessel_0"], "tokens": 258}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _modified_bessel_0(x: torch.Tensor) -> torch.Tensor:\n x = torch.as_tensor(x, dtype=torch.float, device=x.device if isinstance(x, torch.Tensor) else None)\n if torch.abs(x) < 3.75:\n y = x * x / 14.0625\n return polyval([0.45813e-2, 0.360768e-1, 0.2659732, 1.2067492, 3.0899424, 3.5156229, 1.0], y)\n ax = torch.abs(x)\n y = 3.75 / ax\n _coef = [\n 
0.392377e-2,\n -0.1647633e-1,\n 0.2635537e-1,\n -0.2057706e-1,\n 0.916281e-2,\n -0.157565e-2,\n 0.225319e-2,\n 0.1328592e-1,\n 0.39894228,\n ]\n return polyval(_coef, y) * torch.exp(ax) / torch.sqrt(ax)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py__modified_bessel_1__modified_bessel_1.return._ans_if_x_0_0_else_ans": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py__modified_bessel_1__modified_bessel_1.return._ans_if_x_0_0_else_ans", "embedding": null, "metadata": {"file_path": "monai/networks/layers/convutils.py", "file_name": "convutils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 184, "end_line": 204, "span_ids": ["_modified_bessel_1"], "tokens": 288}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _modified_bessel_1(x: torch.Tensor) -> torch.Tensor:\n x = torch.as_tensor(x, dtype=torch.float, device=x.device if isinstance(x, torch.Tensor) else None)\n if torch.abs(x) < 3.75:\n y = x * x / 14.0625\n _coef = [0.32411e-3, 0.301532e-2, 0.2658733e-1, 0.15084934, 0.51498869, 0.87890594, 0.5]\n return torch.abs(x) * polyval(_coef, y)\n ax = torch.abs(x)\n y = 3.75 / ax\n _coef = [\n -0.420059e-2,\n 0.1787654e-1,\n -0.2895312e-1,\n 0.2282967e-1,\n -0.1031555e-1,\n 0.163801e-2,\n -0.362018e-2,\n -0.3988024e-1,\n 0.39894228,\n ]\n ans = polyval(_coef, y) * torch.exp(ax) / torch.sqrt(ax)\n return -ans if x < 0.0 else ans", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py__modified_bessel_i_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/convutils.py__modified_bessel_i_", "embedding": null, "metadata": {"file_path": "monai/networks/layers/convutils.py", "file_name": "convutils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 207, "end_line": 229, "span_ids": ["_modified_bessel_i"], "tokens": 300}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def _modified_bessel_i(n: int, x: torch.Tensor) -> torch.Tensor:\n if n < 2:\n raise ValueError(f\"n must be greater than 1, got n={n}.\")\n x = torch.as_tensor(x, dtype=torch.float, device=x.device if isinstance(x, torch.Tensor) else None)\n if x == 0.0:\n return x\n device = x.device\n tox = 2.0 / torch.abs(x)\n ans, bip, bi = torch.tensor(0.0, device=device), torch.tensor(0.0, 
device=device), torch.tensor(1.0, device=device)\n m = int(2 * (n + np.floor(np.sqrt(40.0 * n))))\n for j in range(m, 0, -1):\n bim = bip + float(j) * tox * bi\n bip = bi\n bi = bim\n if abs(bi) > 1.0e10:\n ans = ans * 1.0e-10\n bi = bi * 1.0e-10\n bip = bip * 1.0e-10\n if j == n:\n ans = bip\n ans = ans * _modified_bessel_0(x) / bi\n return -ans if x < 0.0 and (n % 2) == 1 else ans", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_GaussianFilter_GaussianFilter.forward.return.separable_filtering_x_x_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/simplelayers.py_GaussianFilter_GaussianFilter.forward.return.separable_filtering_x_x_", "embedding": null, "metadata": {"file_path": "monai/networks/layers/simplelayers.py", "file_name": "simplelayers.py", "file_type": "text/x-python", "category": "implementation", "start_line": 367, "end_line": 414, "span_ids": ["GaussianFilter", "GaussianFilter.__init__", "GaussianFilter.forward"], "tokens": 466}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GaussianFilter(nn.Module):\n def __init__(\n self,\n spatial_dims: int,\n sigma: Union[Sequence[float], float, Sequence[torch.Tensor], torch.Tensor],\n truncated: float = 4.0,\n approx: str = \"erf\",\n requires_grad: bool = False,\n ) -> None:\n \"\"\"\n Args:\n spatial_dims: number of spatial dimensions of the input image.\n must have shape (Batch, channels, H[, W, ...]).\n sigma: std. 
could be a single value, or `spatial_dims` number of values.\n truncated: spreads how many stds.\n approx: discrete Gaussian kernel type, available options are \"erf\", \"sampled\", and \"scalespace\".\n\n - ``erf`` approximation interpolates the error function;\n - ``sampled`` uses a sampled Gaussian kernel;\n - ``scalespace`` corresponds to\n https://en.wikipedia.org/wiki/Scale_space_implementation#The_discrete_Gaussian_kernel\n based on the modified Bessel functions.\n\n requires_grad: whether to store the gradients for sigma.\n if True, `sigma` will be the initial value of the parameters of this module\n (for example `parameters()` iterator could be used to get the parameters);\n otherwise this module will fix the kernels using `sigma` as the std.\n \"\"\"\n super().__init__()\n self.sigma = [\n torch.nn.Parameter(\n torch.as_tensor(s, dtype=torch.float, device=s.device if isinstance(s, torch.Tensor) else None),\n requires_grad=requires_grad,\n )\n for s in ensure_tuple_rep(sigma, int(spatial_dims))\n ]\n self.truncated = truncated\n self.approx = approx\n for idx, param in enumerate(self.sigma):\n self.register_parameter(f\"kernel_sigma_{idx}\", param)\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n \"\"\"\n Args:\n x: in shape [Batch, chns, H, W, D].\n \"\"\"\n _kernel = [gaussian_1d(s, truncated=self.truncated, approx=self.approx) for s in self.sigma]\n return separable_filtering(x=x, kernels=_kernel)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_grid_pull_grid_pull.return._GridPull_apply_input_gr": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_grid_pull_grid_pull.return._GridPull_apply_input_gr", "embedding": null, "metadata": {"file_path": "monai/networks/layers/spatial_transforms.py", "file_name": "spatial_transforms.py", "file_type": "text/x-python", "category": "implementation", "start_line": 49, "end_line": 116, "span_ids": ["grid_pull"], "tokens": 795}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def grid_pull(input: torch.Tensor, grid: torch.Tensor, interpolation=\"linear\", bound=\"zero\", extrapolate: bool = True):\n \"\"\"\n Sample an image with respect to a deformation field.\n\n `interpolation` can be an int, a string or an InterpolationType.\n Possible values are::\n\n - 0 or 'nearest' or InterpolationType.nearest\n - 1 or 'linear' or InterpolationType.linear\n - 2 or 'quadratic' or InterpolationType.quadratic\n - 3 or 'cubic' or InterpolationType.cubic\n - 4 or 'fourth' or InterpolationType.fourth\n - 5 or 'fifth' or InterpolationType.fifth\n - 6 or 'sixth' or InterpolationType.sixth\n - 7 or 'seventh' or InterpolationType.seventh\n\n A list of values can be provided, in the order [W, H, D],\n to specify dimension-specific interpolation orders.\n\n `bound` can be an int, a string or a BoundType.\n Possible values are::\n\n - 0 or 'replicate' or 'nearest' or 
BoundType.replicate\n - 1 or 'dct1' or 'mirror' or BoundType.dct1\n - 2 or 'dct2' or 'reflect' or BoundType.dct2\n - 3 or 'dst1' or 'antimirror' or BoundType.dst1\n - 4 or 'dst2' or 'antireflect' or BoundType.dst2\n - 5 or 'dft' or 'wrap' or BoundType.dft\n - 7 or 'zero' or BoundType.zero\n\n A list of values can be provided, in the order [W, H, D],\n to specify dimension-specific boundary conditions.\n `sliding` is a specific condition than only applies to flow fields\n (with as many channels as dimensions). It cannot be dimension-specific.\n Note that:\n\n - `dft` corresponds to circular padding\n - `dct2` corresponds to Neumann boundary conditions (symmetric)\n - `dst2` corresponds to Dirichlet boundary conditions (antisymmetric)\n\n See Also:\n - https://en.wikipedia.org/wiki/Discrete_cosine_transform\n - https://en.wikipedia.org/wiki/Discrete_sine_transform\n - ``help(monai._C.BoundType)``\n - ``help(monai._C.InterpolationType)``\n\n Args:\n input: Input image. `(B, C, Wi, Hi, Di)`.\n grid: Deformation field. `(B, Wo, Ho, Do, 1|2|3)`.\n interpolation (int or list[int] , optional): Interpolation order.\n Defaults to `'linear'`.\n bound (BoundType, or list[BoundType], optional): Boundary conditions.\n Defaults to `'zero'`.\n extrapolate: Extrapolate out-of-bound data.\n Defaults to `True`.\n\n Returns:\n output (torch.Tensor): Deformed image `(B, C, Wo, Ho, Do)`.\n\n \"\"\"\n # Convert parameters\n bound = [_C.BoundType.__members__[b] if isinstance(b, str) else _C.BoundType(b) for b in ensure_tuple(bound)]\n interpolation = [\n _C.InterpolationType.__members__[i] if isinstance(i, str) else _C.InterpolationType(i)\n for i in ensure_tuple(interpolation)\n ]\n\n return _GridPull.apply(input, grid, interpolation, bound, extrapolate)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_grid_count_grid_count.return._GridCount_apply_grid_sh": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_grid_count_grid_count.return._GridCount_apply_grid_sh", "embedding": null, "metadata": {"file_path": "monai/networks/layers/spatial_transforms.py", "file_name": "spatial_transforms.py", "file_type": "text/x-python", "category": "implementation", "start_line": 240, "end_line": 313, "span_ids": ["grid_count"], "tokens": 822}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def grid_count(grid: torch.Tensor, shape=None, interpolation=\"linear\", bound=\"zero\", extrapolate: bool = True):\n \"\"\"\n Splatting weights with respect to a deformation field (pull adjoint).\n\n This function is equivalent to applying grid_push to an image of ones.\n\n `interpolation` can be an int, a string or an InterpolationType.\n Possible values are::\n\n - 0 or 'nearest' or InterpolationType.nearest\n - 1 or 'linear' or InterpolationType.linear\n - 2 or 'quadratic' or InterpolationType.quadratic\n - 3 or 'cubic' or InterpolationType.cubic\n - 4 or 'fourth' or 
InterpolationType.fourth\n - 5 or 'fifth' or InterpolationType.fifth\n - 6 or 'sixth' or InterpolationType.sixth\n - 7 or 'seventh' or InterpolationType.seventh\n\n A list of values can be provided, in the order [W, H, D],\n to specify dimension-specific interpolation orders.\n\n `bound` can be an int, a string or a BoundType.\n Possible values are::\n\n - 0 or 'replicate' or 'nearest' or BoundType.replicate\n - 1 or 'dct1' or 'mirror' or BoundType.dct1\n - 2 or 'dct2' or 'reflect' or BoundType.dct2\n - 3 or 'dst1' or 'antimirror' or BoundType.dst1\n - 4 or 'dst2' or 'antireflect' or BoundType.dst2\n - 5 or 'dft' or 'wrap' or BoundType.dft\n - 7 or 'zero' or BoundType.zero\n\n A list of values can be provided, in the order [W, H, D],\n to specify dimension-specific boundary conditions.\n `sliding` is a specific condition than only applies to flow fields\n (with as many channels as dimensions). It cannot be dimension-specific.\n Note that:\n\n - `dft` corresponds to circular padding\n - `dct2` corresponds to Neumann boundary conditions (symmetric)\n - `dst2` corresponds to Dirichlet boundary conditions (antisymmetric)\n\n See Also:\n\n - https://en.wikipedia.org/wiki/Discrete_cosine_transform\n - https://en.wikipedia.org/wiki/Discrete_sine_transform\n - ``help(monai._C.BoundType)``\n - ``help(monai._C.InterpolationType)``\n\n Args:\n grid: Deformation field `(B, Wi, Hi, Di, 2|3)`.\n shape: shape of the source image.\n interpolation (int or list[int] , optional): Interpolation order.\n Defaults to `'linear'`.\n bound (BoundType, or list[BoundType], optional): Boundary conditions.\n Defaults to `'zero'`.\n extrapolate (bool, optional): Extrapolate out-of-bound data.\n Defaults to `True`.\n\n Returns:\n output (torch.Tensor): Splat weights `(B, 1, Wo, Ho, Do)`.\n\n \"\"\"\n # Convert parameters\n bound = [_C.BoundType.__members__[b] if isinstance(b, str) else _C.BoundType(b) for b in ensure_tuple(bound)]\n interpolation = [\n _C.InterpolationType.__members__[i] if isinstance(i, str) else _C.InterpolationType(i)\n for i in ensure_tuple(interpolation)\n ]\n\n if shape is None:\n shape = tuple(grid.shape[2:])\n\n return _GridCount.apply(grid, shape, interpolation, bound, extrapolate)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_grid_grad_grid_grad.return._GridGrad_apply_input_gr": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_grid_grad_grid_grad.return._GridGrad_apply_input_gr", "embedding": null, "metadata": {"file_path": "monai/networks/layers/spatial_transforms.py", "file_name": "spatial_transforms.py", "file_type": "text/x-python", "category": "implementation", "start_line": 340, "end_line": 408, "span_ids": ["grid_grad"], "tokens": 798}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def grid_grad(input: torch.Tensor, grid: torch.Tensor, interpolation=\"linear\", bound=\"zero\", extrapolate: bool = True):\n 
\"\"\"\n Sample an image with respect to a deformation field.\n\n `interpolation` can be an int, a string or an InterpolationType.\n Possible values are::\n\n - 0 or 'nearest' or InterpolationType.nearest\n - 1 or 'linear' or InterpolationType.linear\n - 2 or 'quadratic' or InterpolationType.quadratic\n - 3 or 'cubic' or InterpolationType.cubic\n - 4 or 'fourth' or InterpolationType.fourth\n - 5 or 'fifth' or InterpolationType.fifth\n - 6 or 'sixth' or InterpolationType.sixth\n - 7 or 'seventh' or InterpolationType.seventh\n\n A list of values can be provided, in the order [W, H, D],\n to specify dimension-specific interpolation orders.\n\n `bound` can be an int, a string or a BoundType.\n Possible values are::\n\n - 0 or 'replicate' or 'nearest' or BoundType.replicate\n - 1 or 'dct1' or 'mirror' or BoundType.dct1\n - 2 or 'dct2' or 'reflect' or BoundType.dct2\n - 3 or 'dst1' or 'antimirror' or BoundType.dst1\n - 4 or 'dst2' or 'antireflect' or BoundType.dst2\n - 5 or 'dft' or 'wrap' or BoundType.dft\n - 7 or 'zero' or BoundType.zero\n\n A list of values can be provided, in the order [W, H, D],\n to specify dimension-specific boundary conditions.\n `sliding` is a specific condition than only applies to flow fields\n (with as many channels as dimensions). It cannot be dimension-specific.\n Note that:\n\n - `dft` corresponds to circular padding\n - `dct2` corresponds to Neumann boundary conditions (symmetric)\n - `dst2` corresponds to Dirichlet boundary conditions (antisymmetric)\n\n See Also:\n\n - https://en.wikipedia.org/wiki/Discrete_cosine_transform\n - https://en.wikipedia.org/wiki/Discrete_sine_transform\n - ``help(monai._C.BoundType)``\n - ``help(monai._C.InterpolationType)``\n\n\n Args:\n input: Input image. `(B, C, Wi, Hi, Di)`.\n grid: Deformation field. `(B, Wo, Ho, Do, 2|3)`.\n interpolation (int or list[int] , optional): Interpolation order.\n Defaults to `'linear'`.\n bound (BoundType, or list[BoundType], optional): Boundary conditions.\n Defaults to `'zero'`.\n extrapolate: Extrapolate out-of-bound data. 
Defaults to `True`.\n\n Returns:\n output (torch.Tensor): Sampled gradients (B, C, Wo, Ho, Do, 1|2|3).\n\n \"\"\"\n # Convert parameters\n bound = [_C.BoundType.__members__[b] if isinstance(b, str) else _C.BoundType(b) for b in ensure_tuple(bound)]\n interpolation = [\n _C.InterpolationType.__members__[i] if isinstance(i, str) else _C.InterpolationType(i)\n for i in ensure_tuple(interpolation)\n ]\n\n return _GridGrad.apply(input, grid, interpolation, bound, extrapolate)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_AffineTransform_AffineTransform.__init__.self.reverse_indexing.reverse_indexing": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/layers/spatial_transforms.py_AffineTransform_AffineTransform.__init__.self.reverse_indexing.reverse_indexing", "embedding": null, "metadata": {"file_path": "monai/networks/layers/spatial_transforms.py", "file_name": "spatial_transforms.py", "file_type": "text/x-python", "category": "implementation", "start_line": 404, "end_line": 454, "span_ids": ["AffineTransform.__init__", "AffineTransform"], "tokens": 698}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AffineTransform(nn.Module):\n def __init__(\n self,\n spatial_size: Optional[Union[Sequence[int], int]] = None,\n normalized: bool = False,\n mode: Union[GridSampleMode, str] = GridSampleMode.BILINEAR,\n padding_mode: Union[GridSamplePadMode, str] = GridSamplePadMode.ZEROS,\n align_corners: bool = False,\n reverse_indexing: bool = True,\n ) -> None:\n \"\"\"\n Apply affine transformations with a batch of affine matrices.\n\n When `normalized=False` and `reverse_indexing=True`,\n it does the commonly used resampling in the 'pull' direction\n following the ``scipy.ndimage.affine_transform`` convention.\n In this case `theta` is equivalent to (ndim+1, ndim+1) input ``matrix`` of ``scipy.ndimage.affine_transform``,\n operates on homogeneous coordinates.\n See also: https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.affine_transform.html\n\n When `normalized=True` and `reverse_indexing=False`,\n it applies `theta` to the normalized coordinates (coords. in the range of [-1, 1]) directly.\n This is often used with `align_corners=False` to achieve resolution-agnostic resampling,\n thus useful as a part of trainable modules such as the spatial transformer networks.\n See also: https://pytorch.org/tutorials/intermediate/spatial_transformer_tutorial.html\n\n Args:\n spatial_size: output spatial shape, the full output shape will be\n `[N, C, *spatial_size]` where N and C are inferred from the `src` input of `self.forward`.\n normalized: indicating whether the provided affine matrix `theta` is defined\n for the normalized coordinates. 
If `normalized=False`, `theta` will be converted\n to operate on normalized coordinates as pytorch affine_grid works with the normalized\n coordinates.\n mode: {``\"bilinear\"``, ``\"nearest\"``}\n Interpolation mode to calculate output values. Defaults to ``\"bilinear\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n padding_mode: {``\"zeros\"``, ``\"border\"``, ``\"reflection\"``}\n Padding mode for outside grid values. Defaults to ``\"zeros\"``.\n See also: https://pytorch.org/docs/stable/nn.functional.html#grid-sample\n align_corners: see also https://pytorch.org/docs/stable/nn.functional.html#grid-sample.\n reverse_indexing: whether to reverse the spatial indexing of image and coordinates.\n set to `False` if `theta` follows pytorch's default \"D, H, W\" convention.\n set to `True` if `theta` follows `scipy.ndimage` default \"i, j, k\" convention.\n \"\"\"\n super().__init__()\n self.spatial_size = ensure_tuple(spatial_size) if spatial_size is not None else None\n self.normalized = normalized\n self.mode: GridSampleMode = GridSampleMode(mode)\n self.padding_mode: GridSamplePadMode = GridSamplePadMode(padding_mode)\n self.align_corners = align_corners\n self.reverse_indexing = reverse_indexing", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet.__init___AHNet.__init__.if_pretrained_.self_copy_from_net2d_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/ahnet.py_AHNet.__init___AHNet.__init__.if_pretrained_.self_copy_from_net2d_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/ahnet.py", "file_name": "ahnet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 344, "end_line": 451, "span_ids": ["AHNet.__init__"], "tokens": 1374}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class AHNet(nn.Module):\n\n def __init__(\n self,\n layers: tuple = (3, 4, 6, 3),\n spatial_dims: int = 3,\n in_channels: int = 1,\n out_channels: int = 1,\n psp_block_num: int = 4,\n upsample_mode: str = \"transpose\",\n pretrained: bool = False,\n progress: bool = True,\n ):\n self.inplanes = 64\n super(AHNet, self).__init__()\n\n conv_type = Conv[Conv.CONV, spatial_dims]\n conv_trans_type = Conv[Conv.CONVTRANS, spatial_dims]\n norm_type = Norm[Norm.BATCH, spatial_dims]\n pool_type: Type[Union[nn.MaxPool2d, nn.MaxPool3d]] = Pool[Pool.MAX, spatial_dims]\n relu_type: Type[nn.ReLU] = Act[Act.RELU]\n conv2d_type: Type[nn.Conv2d] = Conv[Conv.CONV, 2]\n norm2d_type: Type[nn.BatchNorm2d] = Norm[Norm.BATCH, 2]\n\n self.conv2d_type = conv2d_type\n self.norm2d_type = norm2d_type\n self.conv_type = conv_type\n self.norm_type = norm_type\n self.relu_type = relu_type\n self.pool_type = pool_type\n self.spatial_dims = spatial_dims\n self.psp_block_num = psp_block_num\n self.psp = None\n\n if spatial_dims not in [2, 3]:\n raise AssertionError(\"spatial_dims can only be 2 or 3.\")\n if psp_block_num not in [0, 1, 2, 
3, 4]:\n raise AssertionError(\"psp_block_num should be an integer that belongs to [0, 4].\")\n\n self.conv1 = conv_type(\n in_channels,\n 64,\n kernel_size=(7, 7, 3)[-spatial_dims:],\n stride=(2, 2, 1)[-spatial_dims:],\n padding=(3, 3, 1)[-spatial_dims:],\n bias=False,\n )\n self.pool1 = pool_type(kernel_size=(1, 1, 2)[-spatial_dims:], stride=(1, 1, 2)[-spatial_dims:])\n self.bn0 = norm_type(64)\n self.relu = relu_type(inplace=True)\n if upsample_mode in [\"transpose\", \"nearest\"]:\n \"\"\"\n To maintain the determinism, the value of kernel_size and stride should be the same.\n (you can check this link for reference: https://github.com/Project-MONAI/MONAI/pull/815 )\n \"\"\"\n self.maxpool = pool_type(kernel_size=(2, 2, 2)[-spatial_dims:], stride=2)\n else:\n self.maxpool = pool_type(kernel_size=(3, 3, 3)[-spatial_dims:], stride=2, padding=1)\n\n self.layer1 = self._make_layer(Bottleneck3x3x1, 64, layers[0], stride=1)\n self.layer2 = self._make_layer(Bottleneck3x3x1, 128, layers[1], stride=2)\n self.layer3 = self._make_layer(Bottleneck3x3x1, 256, layers[2], stride=2)\n self.layer4 = self._make_layer(Bottleneck3x3x1, 512, layers[3], stride=2)\n\n # Make the 3D dense decoder layers\n densegrowth = 20\n densebn = 4\n ndenselayer = 3\n\n num_init_features = 64\n noutres1 = 256\n noutres2 = 512\n noutres3 = 1024\n noutres4 = 2048\n\n self.up0 = UpTransition(spatial_dims, noutres4, noutres3, upsample_mode)\n self.dense0 = DenseBlock(spatial_dims, ndenselayer, noutres3, densebn, densegrowth, 0.0)\n noutdense = noutres3 + ndenselayer * densegrowth\n\n self.up1 = UpTransition(spatial_dims, noutdense, noutres2, upsample_mode)\n self.dense1 = DenseBlock(spatial_dims, ndenselayer, noutres2, densebn, densegrowth, 0.0)\n noutdense1 = noutres2 + ndenselayer * densegrowth\n\n self.up2 = UpTransition(spatial_dims, noutdense1, noutres1, upsample_mode)\n self.dense2 = DenseBlock(spatial_dims, ndenselayer, noutres1, densebn, densegrowth, 0.0)\n noutdense2 = noutres1 + ndenselayer * densegrowth\n\n self.trans1 = Projection(spatial_dims, noutdense2, num_init_features)\n self.dense3 = DenseBlock(spatial_dims, ndenselayer, num_init_features, densebn, densegrowth, 0.0)\n noutdense3 = num_init_features + densegrowth * ndenselayer\n\n self.up3 = UpTransition(spatial_dims, noutdense3, num_init_features, upsample_mode)\n self.dense4 = DenseBlock(spatial_dims, ndenselayer, num_init_features, densebn, densegrowth, 0.0)\n noutdense4 = num_init_features + densegrowth * ndenselayer\n\n self.psp = PSP(spatial_dims, psp_block_num, noutdense4, upsample_mode)\n self.final = Final(spatial_dims, psp_block_num + noutdense4, out_channels, upsample_mode)\n\n # Initialise parameters\n for m in self.modules():\n if isinstance(m, (conv_type, conv_trans_type)):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n elif isinstance(m, norm_type):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n\n if pretrained:\n net2d = FCN(pretrained=True, progress=progress)\n self.copy_from(net2d)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_re__DenseLayer.forward.return.torch_cat_x_new_feature": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/densenet.py_re__DenseLayer.forward.return.torch_cat_x_new_feature", "embedding": null, "metadata": {"file_path": "monai/networks/nets/densenet.py", "file_name": "densenet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 58, "span_ids": ["_DenseLayer.__init__", "_DenseLayer.forward", "_DenseLayer", "docstring"], "tokens": 431}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import re\nfrom collections import OrderedDict\nfrom typing import Callable, Sequence, Type, Union\n\nimport torch\nimport torch.nn as nn\nfrom torch.hub import load_state_dict_from_url\n\nfrom monai.networks.layers.factories import Conv, Dropout, Norm, Pool\n\n\nclass _DenseLayer(nn.Module):\n def __init__(\n self, spatial_dims: int, in_channels: int, growth_rate: int, bn_size: int, dropout_prob: float\n ) -> None:\n \"\"\"\n Args:\n spatial_dims: number of spatial dimensions of the input image.\n in_channels: number of the input channel.\n growth_rate: how many filters to add each layer (k in paper).\n bn_size: multiplicative factor for number of bottle neck layers.\n (i.e. bn_size * k features in the bottleneck layer)\n dropout_prob: dropout rate after each dense layer.\n \"\"\"\n super(_DenseLayer, self).__init__()\n\n out_channels = bn_size * growth_rate\n conv_type: Callable = Conv[Conv.CONV, spatial_dims]\n norm_type: Callable = Norm[Norm.BATCH, spatial_dims]\n dropout_type: Callable = Dropout[Dropout.DROPOUT, spatial_dims]\n\n self.layers = nn.Sequential()\n\n self.layers.add_module(\"norm1\", norm_type(in_channels))\n self.layers.add_module(\"relu1\", nn.ReLU(inplace=True))\n self.layers.add_module(\"conv1\", conv_type(in_channels, out_channels, kernel_size=1, bias=False))\n\n self.layers.add_module(\"norm2\", norm_type(out_channels))\n self.layers.add_module(\"relu2\", nn.ReLU(inplace=True))\n self.layers.add_module(\"conv2\", conv_type(out_channels, growth_rate, kernel_size=3, padding=1, bias=False))\n\n if dropout_prob > 0:\n self.layers.add_module(\"dropout\", dropout_type(dropout_prob))\n\n def forward(self, x: torch.Tensor) -> torch.Tensor:\n new_features = self.layers(x)\n return torch.cat([x, new_features], 1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/dynunet.py_DynUNet.get_module_list_DynUNet.get_module_list.return.nn_ModuleList_layers_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/dynunet.py_DynUNet.get_module_list_DynUNet.get_module_list.return.nn_ModuleList_layers_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/dynunet.py", "file_name": "dynunet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 170, "end_line": 207, "span_ids": ["DynUNet.get_module_list"], "tokens": 313}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DynUNet(nn.Module):\n\n def get_module_list(\n self,\n in_channels: List[int],\n out_channels: List[int],\n kernel_size: Sequence[Union[Sequence[int], int]],\n strides: Sequence[Union[Sequence[int], int]],\n conv_block: nn.Module,\n upsample_kernel_size: Optional[Sequence[Union[Sequence[int], int]]] = None,\n ):\n layers = []\n if upsample_kernel_size is not None:\n for in_c, out_c, kernel, stride, up_kernel in zip(\n in_channels, out_channels, kernel_size, strides, upsample_kernel_size\n ):\n params = {\n \"spatial_dims\": self.spatial_dims,\n \"in_channels\": in_c,\n \"out_channels\": out_c,\n \"kernel_size\": kernel,\n \"stride\": stride,\n \"norm_name\": self.norm_name,\n \"upsample_kernel_size\": up_kernel,\n }\n layer = conv_block(**params)\n layers.append(layer)\n else:\n for in_c, out_c, kernel, stride in zip(in_channels, out_channels, kernel_size, strides):\n params = {\n \"spatial_dims\": self.spatial_dims,\n \"in_channels\": in_c,\n \"out_channels\": out_c,\n \"kernel_size\": kernel,\n \"stride\": stride,\n \"norm_name\": self.norm_name,\n }\n layer = conv_block(**params)\n layers.append(layer)\n return nn.ModuleList(layers)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/dynunet.py_DynUNet.get_deep_supervision_heads_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/networks/nets/dynunet.py_DynUNet.get_deep_supervision_heads_", "embedding": null, "metadata": {"file_path": "monai/networks/nets/dynunet.py", "file_name": "dynunet.py", "file_type": "text/x-python", "category": "implementation", "start_line": 291, "end_line": 307, "span_ids": ["DynUNet.get_deep_supervision_heads", "DynUNet.initialize_weights", "impl:3"], "tokens": 170}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DynUNet(nn.Module):\n\n def get_deep_supervision_heads(self):\n return nn.ModuleList([self.get_output_block(i + 1) for i in range(len(self.upsamples) - 1)])\n\n @staticmethod\n def initialize_weights(module):\n name = module.__class__.__name__.lower()\n if \"conv3d\" in name or \"conv2d\" in name:\n nn.init.kaiming_normal_(module.weight, a=0.01)\n if module.bias is not None:\n nn.init.constant_(module.bias, 0)\n elif \"norm\" in name:\n nn.init.normal_(module.weight, 1.0, 0.02)\n nn.init.zeros_(module.bias)\n\n\nDynUnet = Dynunet = dynunet = DynUNet", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/novograd.py_from_typing_import_Callab_Novograd.__setstate__.for_group_in_self_param_g.group_setdefault_amsgrad": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/novograd.py_from_typing_import_Callab_Novograd.__setstate__.for_group_in_self_param_g.group_setdefault_amsgrad", "embedding": null, "metadata": {"file_path": "monai/optimizers/novograd.py", "file_name": "novograd.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 71, "span_ids": ["Novograd", "Novograd.__setstate__", "Novograd.__init__", "docstring"], "tokens": 644}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from typing import Callable, Iterable, Optional, Tuple\n\nimport torch\nfrom torch.optim import Optimizer\n\n\nclass Novograd(Optimizer):\n \"\"\"\n Novograd based on `Stochastic Gradient Methods with Layer-wise Adaptive Moments for Training of Deep Networks\n `_.\n The code is adapted from the implementations in `Jasper for PyTorch\n `_,\n and `OpenSeq2Seq `_.\n\n Args:\n params: iterable of parameters to optimize or dicts defining parameter groups.\n lr: learning rate. Defaults to 1e-3.\n betas: coefficients used for computing running averages of gradient and its square. Defaults to (0.9, 0.98).\n eps: term added to the denominator to improve numerical stability. Defaults to 1e-8.\n weight_decay: weight decay (L2 penalty). Defaults to 0.\n grad_averaging: gradient averaging. Defaults to ``False``.\n amsgrad: whether to use the AMSGrad variant of this algorithm from the paper\n `On the Convergence of Adam and Beyond `_. 
Defaults to ``False``.\n \"\"\"\n\n def __init__(\n self,\n params: Iterable,\n lr: float = 1e-3,\n betas: Tuple[float, float] = (0.9, 0.98),\n eps: float = 1e-8,\n weight_decay: float = 0,\n grad_averaging: bool = False,\n amsgrad: bool = False,\n ):\n if 0.0 > lr:\n raise ValueError(\"Invalid learning rate: {}\".format(lr))\n if 0.0 > eps:\n raise ValueError(\"Invalid epsilon value: {}\".format(eps))\n if not 0.0 <= betas[0] < 1.0:\n raise ValueError(\"Invalid beta parameter at index 0: {}\".format(betas[0]))\n if not 0.0 <= betas[1] < 1.0:\n raise ValueError(\"Invalid beta parameter at index 1: {}\".format(betas[1]))\n if 0.0 > weight_decay:\n raise ValueError(\"Invalid weight_decay value: {}\".format(weight_decay))\n defaults = dict(\n lr=lr,\n betas=betas,\n eps=eps,\n weight_decay=weight_decay,\n grad_averaging=grad_averaging,\n amsgrad=amsgrad,\n )\n\n super(Novograd, self).__init__(params, defaults)\n\n def __setstate__(self, state):\n super(Novograd, self).__setstate__(state)\n for group in self.param_groups:\n group.setdefault(\"amsgrad\", False)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/novograd.py_Novograd.step_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/optimizers/novograd.py_Novograd.step_", "embedding": null, "metadata": {"file_path": "monai/optimizers/novograd.py", "file_name": "novograd.py", "file_type": "text/x-python", "category": "implementation", "start_line": 73, "end_line": 137, "span_ids": ["Novograd.step"], "tokens": 542}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class Novograd(Optimizer):\n\n def step(self, closure: Optional[Callable] = None):\n \"\"\"Performs a single optimization step.\n\n Arguments:\n closure: A closure that reevaluates the model and returns the loss. Defaults to ``None``.\n \"\"\"\n loss = None\n if closure is not None:\n loss = closure()\n\n for group in self.param_groups:\n for p in group[\"params\"]:\n if p.grad is None:\n continue\n grad = p.grad.data\n if grad.is_sparse:\n raise RuntimeError(\"Sparse gradients are not supported.\")\n amsgrad = group[\"amsgrad\"]\n\n state = self.state[p]\n\n # State initialization\n if len(state) == 0:\n state[\"step\"] = 0\n # Exponential moving average of gradient values\n state[\"exp_avg\"] = torch.zeros_like(p.data)\n # Exponential moving average of squared gradient values\n state[\"exp_avg_sq\"] = torch.zeros([]).to(state[\"exp_avg\"].device)\n if amsgrad:\n # Maintains max of all exp. moving avg. of sq. grad. 
values\n state[\"max_exp_avg_sq\"] = torch.zeros([]).to(state[\"exp_avg\"].device)\n\n exp_avg, exp_avg_sq = state[\"exp_avg\"], state[\"exp_avg_sq\"]\n if amsgrad:\n max_exp_avg_sq = state[\"max_exp_avg_sq\"]\n beta1, beta2 = group[\"betas\"]\n\n state[\"step\"] += 1\n\n norm = torch.sum(torch.pow(grad, 2))\n\n if exp_avg_sq == 0:\n exp_avg_sq.copy_(norm)\n else:\n exp_avg_sq.mul_(beta2).add_(norm, alpha=1 - beta2)\n\n if amsgrad:\n # Maintains the maximum of all 2nd moment running avg. till now\n torch.max(max_exp_avg_sq, exp_avg_sq, out=max_exp_avg_sq)\n # Use the max. for normalizing running avg. of gradient\n denom = max_exp_avg_sq.sqrt().add_(group[\"eps\"])\n else:\n denom = exp_avg_sq.sqrt().add_(group[\"eps\"])\n\n grad.div_(denom)\n if group[\"weight_decay\"] != 0:\n grad.add_(p.data, alpha=group[\"weight_decay\"])\n if group[\"grad_averaging\"]:\n grad.mul_(1 - beta1)\n exp_avg.mul_(beta1).add_(grad)\n\n p.data.add_(exp_avg, alpha=-group[\"lr\"])\n\n return loss", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandCropByPosNegLabel_RandCropByPosNegLabel._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandCropByPosNegLabel_RandCropByPosNegLabel._", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 499, "end_line": 540, "span_ids": ["RandCropByPosNegLabel"], "tokens": 709}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandCropByPosNegLabel(Randomizable):\n \"\"\"\n Crop random fixed sized regions with the center being a foreground or background voxel\n based on the Pos Neg Ratio.\n And will return a list of arrays for all the cropped images.\n For example, crop two (3 x 3) arrays from (5 x 5) array with pos/neg=1::\n\n [[[0, 0, 0, 0, 0],\n [0, 1, 2, 1, 0], [[0, 1, 2], [[2, 1, 0],\n [0, 1, 3, 0, 0], --> [0, 1, 3], [3, 0, 0],\n [0, 0, 0, 0, 0], [0, 0, 0]] [0, 0, 0]]\n [0, 0, 0, 0, 0]]]\n\n Args:\n spatial_size: the spatial size of the crop region e.g. [224, 224, 128].\n If its components have non-positive values, the corresponding size of `label` will be used.\n label: the label image that is used for finding foreground/background, if None, must set at\n `self.__call__`. 
Non-zero indicates foreground, zero indicates background.\n pos: used with `neg` together to calculate the ratio ``pos / (pos + neg)`` for the probability\n to pick a foreground voxel as a center rather than a background voxel.\n neg: used with `pos` together to calculate the ratio ``pos / (pos + neg)`` for the probability\n to pick a foreground voxel as a center rather than a background voxel.\n num_samples: number of samples (crop regions) to take in each list.\n image: optional image data to help select valid area, can be same as `img` or another image array.\n if not None, use ``label == 0 & image > image_threshold`` to select the negative\n sample (background) center. So the crop center will only come from the valid image areas.\n image_threshold: if enabled `image`, use ``image > image_threshold`` to determine\n the valid image content areas.\n fg_indices: if provided pre-computed foreground indices of `label`, will ignore above `image` and\n `image_threshold`, and randomly select crop centers based on them, need to provide `fg_indices`\n and `bg_indices` together, expect to be 1 dim array of spatial indices after flattening.\n a typical usage is to call `FgBgToIndices` transform first and cache the results.\n bg_indices: if provided pre-computed background indices of `label`, will ignore above `image` and\n `image_threshold`, and randomly select crop centers based on them, need to provide `fg_indices`\n and `bg_indices` together, expect to be 1 dim array of spatial indices after flattening.\n a typical usage is to call `FgBgToIndices` transform first and cache the results.\n\n Raises:\n ValueError: When ``pos`` or ``neg`` are negative.\n ValueError: When ``pos=0`` and ``neg=0``. Incompatible values.\n\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandCropByPosNegLabel.__init___RandCropByPosNegLabel.__init__.self.bg_indices.bg_indices": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandCropByPosNegLabel.__init___RandCropByPosNegLabel.__init__.self.bg_indices.bg_indices", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 542, "end_line": 566, "span_ids": ["RandCropByPosNegLabel.__init__"], "tokens": 258}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandCropByPosNegLabel(Randomizable):\n\n def __init__(\n self,\n spatial_size: Union[Sequence[int], int],\n label: Optional[np.ndarray] = None,\n pos: float = 1.0,\n neg: float = 1.0,\n num_samples: int = 1,\n image: Optional[np.ndarray] = None,\n image_threshold: float = 0.0,\n fg_indices: Optional[np.ndarray] = None,\n bg_indices: Optional[np.ndarray] = None,\n ) -> None:\n self.spatial_size = ensure_tuple(spatial_size)\n self.label = label\n if pos < 0 or neg < 0:\n raise ValueError(f\"pos and neg must be nonnegative, got pos={pos} 
neg={neg}.\")\n if pos + neg == 0:\n raise ValueError(\"Incompatible values: pos=0 and neg=0.\")\n self.pos_ratio = pos / (pos + neg)\n self.num_samples = num_samples\n self.image = image\n self.image_threshold = image_threshold\n self.centers: Optional[List[List[np.ndarray]]] = None\n self.fg_indices = fg_indices\n self.bg_indices = bg_indices", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandCropByPosNegLabel.randomize_RandCropByPosNegLabel.randomize.self.centers.generate_pos_neg_label_cr": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandCropByPosNegLabel.randomize_RandCropByPosNegLabel.randomize.self.centers.generate_pos_neg_label_cr", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 568, "end_line": 583, "span_ids": ["RandCropByPosNegLabel.randomize"], "tokens": 171}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandCropByPosNegLabel(Randomizable):\n\n def randomize(\n self,\n label: np.ndarray,\n fg_indices: Optional[np.ndarray] = None,\n bg_indices: Optional[np.ndarray] = None,\n image: Optional[np.ndarray] = None,\n ) -> None:\n self.spatial_size = fall_back_tuple(self.spatial_size, default=label.shape[1:])\n if fg_indices is None or bg_indices is None:\n fg_indices_, bg_indices_ = map_binary_to_indices(label, image, self.image_threshold)\n else:\n fg_indices_ = fg_indices\n bg_indices_ = bg_indices\n self.centers = generate_pos_neg_label_crop_centers(\n self.spatial_size, self.num_samples, self.pos_ratio, label.shape[1:], fg_indices_, bg_indices_, self.R\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandCropByPosNegLabel.__call___RandCropByPosNegLabel.__call__.return.results": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_RandCropByPosNegLabel.__call___RandCropByPosNegLabel.__call__.return.results", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 585, "end_line": 626, "span_ids": ["RandCropByPosNegLabel.__call__"], "tokens": 441}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
RandCropByPosNegLabel(Randomizable):\n\n def __call__(\n self,\n img: np.ndarray,\n label: Optional[np.ndarray] = None,\n image: Optional[np.ndarray] = None,\n fg_indices: Optional[np.ndarray] = None,\n bg_indices: Optional[np.ndarray] = None,\n ) -> List[np.ndarray]:\n \"\"\"\n Args:\n img: input data to crop samples from based on the pos/neg ratio of `label` and `image`.\n Assumes `img` is a channel-first array.\n label: the label image that is used for finding foreground/background, if None, use `self.label`.\n image: optional image data to help select valid area, can be same as `img` or another image array.\n use ``label == 0 & image > image_threshold`` to select the negative sample(background) center.\n so the crop center will only exist on valid image area. if None, use `self.image`.\n fg_indices: foreground indices to randomly select crop centers,\n need to provide `fg_indices` and `bg_indices` together.\n bg_indices: background indices to randomly select crop centers,\n need to provide `fg_indices` and `bg_indices` together.\n\n \"\"\"\n if label is None:\n label = self.label\n if label is None:\n raise ValueError(\"label should be provided.\")\n if image is None:\n image = self.image\n if fg_indices is None or bg_indices is None:\n if self.fg_indices is not None and self.bg_indices is not None:\n fg_indices = self.fg_indices\n bg_indices = self.bg_indices\n else:\n fg_indices, bg_indices = map_binary_to_indices(label, image, self.image_threshold)\n self.randomize(label, fg_indices, bg_indices, image)\n results: List[np.ndarray] = []\n if self.centers is not None:\n for center in self.centers:\n cropper = SpatialCrop(roi_center=tuple(center), roi_size=self.spatial_size) # type: ignore\n results.append(cropper(img))\n\n return results", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_ResizeWithPadOrCrop_ResizeWithPadOrCrop.__init__.self.cropper.CenterSpatialCrop_roi_siz": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/array.py_ResizeWithPadOrCrop_ResizeWithPadOrCrop.__init__.self.cropper.CenterSpatialCrop_roi_siz", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 629, "end_line": 652, "span_ids": ["ResizeWithPadOrCrop", "ResizeWithPadOrCrop.__init__"], "tokens": 298}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class ResizeWithPadOrCrop(Transform):\n \"\"\"\n Resize an image to a target spatial size by either centrally cropping the image or\n padding it evenly with a user-specified mode.\n When the dimension is smaller than the target size, do symmetric padding along that dim.\n When the dimension is larger than the target size, do central cropping along that dim.\n\n Args:\n spatial_size: the spatial size of output data after padding or crop.\n If has non-positive values, the corresponding size of input 
image will be used (no padding).\n mode: {``\"constant\"``, ``\"edge\"``, ``\"linear_ramp\"``, ``\"maximum\"``, ``\"mean\"``,\n ``\"median\"``, ``\"minimum\"``, ``\"reflect\"``, ``\"symmetric\"``, ``\"wrap\"``, ``\"empty\"``}\n One of the listed string values or a user supplied function for padding. Defaults to ``\"constant\"``.\n See also: https://numpy.org/doc/1.18/reference/generated/numpy.pad.html\n\n \"\"\"\n\n def __init__(\n self,\n spatial_size: Union[Sequence[int], int],\n mode: Union[NumpyPadMode, str] = NumpyPadMode.CONSTANT,\n ):\n self.padder = SpatialPad(spatial_size=spatial_size, mode=mode)\n self.cropper = CenterSpatialCrop(roi_size=spatial_size)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandCropByPosNegLabeld_RandCropByPosNegLabeld._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandCropByPosNegLabeld_RandCropByPosNegLabeld._", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 703, "end_line": 744, "span_ids": ["RandCropByPosNegLabeld"], "tokens": 658}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandCropByPosNegLabeld(Randomizable, MapTransform):\n \"\"\"\n Dictionary-based version :py:class:`monai.transforms.RandCropByPosNegLabel`.\n Crop random fixed sized regions with the center being a foreground or background voxel\n based on the Pos Neg Ratio.\n Suppose all the expected fields specified by `keys` have same shape,\n and add `patch_index` to the corresponding meta data.\n And will return a list of dictionaries for all the cropped images.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n label_key: name of key for label image, this will be used for finding foreground/background.\n spatial_size: the spatial size of the crop region e.g. [224, 224, 128].\n If its components have non-positive values, the corresponding size of `data[label_key]` will be used.\n pos: used with `neg` together to calculate the ratio ``pos / (pos + neg)`` for the probability\n to pick a foreground voxel as a center rather than a background voxel.\n neg: used with `pos` together to calculate the ratio ``pos / (pos + neg)`` for the probability\n to pick a foreground voxel as a center rather than a background voxel.\n num_samples: number of samples (crop regions) to take in each list.\n image_key: if image_key is not None, use ``label == 0 & image > image_threshold`` to select\n the negative sample(background) center. 
so the crop center will only exist on valid image area.\n image_threshold: if enabled image_key, use ``image > image_threshold`` to determine\n the valid image content area.\n fg_indices_key: if provided pre-computed foreground indices of `label`, will ignore above `image_key` and\n `image_threshold`, and randomly select crop centers based on them, need to provide `fg_indices_key`\n and `bg_indices_key` together, expect to be 1 dim array of spatial indices after flattening.\n a typical usage is to call `FgBgToIndicesd` transform first and cache the results.\n bg_indices_key: if provided pre-computed background indices of `label`, will ignore above `image_key` and\n `image_threshold`, and randomly select crop centers based on them, need to provide `fg_indices_key`\n and `bg_indices_key` together, expect to be 1 dim array of spatial indices after flattening.\n a typical usage is to call `FgBgToIndicesd` transform first and cache the results.\n meta_key_postfix: use `key_{postfix}` to to fetch the meta data according to the key data,\n default is `meta_dict`, the meta data is a dictionary object.\n used to add `patch_index` to the meta dict.\n allow_missing_keys: don't raise exception if key is missing.\n\n Raises:\n ValueError: When ``pos`` or ``neg`` are negative.\n ValueError: When ``pos=0`` and ``neg=0``. Incompatible values.\n\n \"\"\"", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandCropByPosNegLabeld.__init___RandCropByPosNegLabeld.__init__.self.centers.None": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandCropByPosNegLabeld.__init___RandCropByPosNegLabeld.__init__.self.centers.None", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 746, "end_line": 775, "span_ids": ["RandCropByPosNegLabeld.__init__"], "tokens": 329}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandCropByPosNegLabeld(Randomizable, MapTransform):\n\n def __init__(\n self,\n keys: KeysCollection,\n label_key: str,\n spatial_size: Union[Sequence[int], int],\n pos: float = 1.0,\n neg: float = 1.0,\n num_samples: int = 1,\n image_key: Optional[str] = None,\n image_threshold: float = 0.0,\n fg_indices_key: Optional[str] = None,\n bg_indices_key: Optional[str] = None,\n meta_key_postfix: str = \"meta_dict\",\n allow_missing_keys: bool = False,\n ) -> None:\n MapTransform.__init__(self, keys, allow_missing_keys)\n self.label_key = label_key\n self.spatial_size: Union[Tuple[int, ...], Sequence[int], int] = spatial_size\n if pos < 0 or neg < 0:\n raise ValueError(f\"pos and neg must be nonnegative, got pos={pos} neg={neg}.\")\n if pos + neg == 0:\n raise ValueError(\"Incompatible values: pos=0 and neg=0.\")\n self.pos_ratio = pos / (pos + neg)\n self.num_samples = num_samples\n self.image_key = image_key\n 
self.image_threshold = image_threshold\n self.fg_indices_key = fg_indices_key\n self.bg_indices_key = bg_indices_key\n self.meta_key_postfix = meta_key_postfix\n self.centers: Optional[List[List[np.ndarray]]] = None", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandCropByPosNegLabeld.randomize_RandCropByPosNegLabeld.randomize.self.centers.generate_pos_neg_label_cr": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/croppad/dictionary.py_RandCropByPosNegLabeld.randomize_RandCropByPosNegLabeld.randomize.self.centers.generate_pos_neg_label_cr", "embedding": null, "metadata": {"file_path": "monai/transforms/croppad/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 437, "end_line": 452, "span_ids": ["RandCropByPosNegLabeld.randomize"], "tokens": 175}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandCropByPosNegLabeld(Randomizable, MapTransform):\n\n def randomize(\n self,\n label: np.ndarray,\n fg_indices: Optional[np.ndarray] = None,\n bg_indices: Optional[np.ndarray] = None,\n image: Optional[np.ndarray] = None,\n ) -> None:\n self.spatial_size = fall_back_tuple(self.spatial_size, default=label.shape[1:])\n if fg_indices is None or bg_indices is None:\n fg_indices_, bg_indices_ = map_binary_to_indices(label, image, self.image_threshold)\n else:\n fg_indices_ = fg_indices\n bg_indices_ = bg_indices\n self.centers = generate_pos_neg_label_crop_centers(\n self.spatial_size, self.num_samples, self.pos_ratio, label.shape[1:], fg_indices_, bg_indices_, self.R\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_NormalizeIntensity._normalize_NormalizeIntensity._normalize.return.img": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_NormalizeIntensity._normalize_NormalizeIntensity._normalize.return.img", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 438, "end_line": 455, "span_ids": ["NormalizeIntensity._normalize"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class NormalizeIntensity(Transform):\n\n def _normalize(self, img: np.ndarray, sub=None, div=None) -> 
np.ndarray:\n        slices = (img != 0) if self.nonzero else np.ones(img.shape, dtype=bool)\n        if not np.any(slices):\n            return img\n\n        _sub = sub if sub is not None else np.mean(img[slices])\n        if isinstance(_sub, np.ndarray):\n            _sub = _sub[slices]\n\n        _div = div if div is not None else np.std(img[slices])\n        if np.isscalar(_div):\n            if _div == 0.0:\n                _div = 1.0\n        elif isinstance(_div, np.ndarray):\n            _div = _div[slices]\n            _div[_div == 0.0] = 1.0\n        img[slices] = (img[slices] - _sub) / _div\n        return img", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_GaussianSharpen_GaussianSharpen.__init__.self.approx.approx": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_GaussianSharpen_GaussianSharpen.__init__.self.approx.approx", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 576, "end_line": 613, "span_ids": ["GaussianSharpen", "GaussianSharpen.__init__"], "tokens": 408}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GaussianSharpen(Transform):\n    \"\"\"\n    Sharpen images using the Gaussian Blur filter.\n    Referring to: http://scipy-lectures.org/advanced/image_processing/auto_examples/plot_sharpen.html.\n    The algorithm is shown as below\n\n    .. code-block:: python\n\n        blurred_f = gaussian_filter(img, sigma1)\n        filter_blurred_f = gaussian_filter(blurred_f, sigma2)\n        img = blurred_f + alpha * (blurred_f - filter_blurred_f)\n\n    A set of default values `sigma1=3.0`, `sigma2=1.0` and `alpha=30.0` is provided for reference.\n\n    Args:\n        sigma1: sigma parameter for the first gaussian kernel. if a list of values, must match the count\n            of spatial dimensions of input data, and apply every value in the list to 1 spatial dimension.\n            if only 1 value provided, use it for all spatial dimensions.\n        sigma2: sigma parameter for the second gaussian kernel. 
if a list of values, must match the count\n of spatial dimensions of input data, and apply every value in the list to 1 spatial dimension.\n if only 1 value provided, use it for all spatial dimensions.\n alpha: weight parameter to compute the final result.\n approx: discrete Gaussian kernel type, available options are \"erf\", \"sampled\", and \"scalespace\".\n see also :py:meth:`monai.networks.layers.GaussianFilter`.\n\n \"\"\"\n\n def __init__(\n self,\n sigma1: Union[Sequence[float], float] = 3.0,\n sigma2: Union[Sequence[float], float] = 1.0,\n alpha: float = 30.0,\n approx: str = \"erf\",\n ) -> None:\n self.sigma1 = sigma1\n self.sigma2 = sigma2\n self.alpha = alpha\n self.approx = approx", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandGaussianSharpen.__call___RandGaussianSharpen.__call__.return.GaussianSharpen_sigma1_si": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandGaussianSharpen.__call___RandGaussianSharpen.__call__.return.GaussianSharpen_sigma1_si", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1003, "end_line": 1009, "span_ids": ["RandGaussianSharpen.__call__"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandGaussianSharpen(RandomizableTransform):\n\n def __call__(self, img: np.ndarray):\n self.randomize()\n if not self._do_transform:\n return img\n sigma1 = ensure_tuple_size(tup=(self.x1, self.y1, self.z1), dim=img.ndim - 1)\n sigma2 = ensure_tuple_size(tup=(self.x2, self.y2, self.z2), dim=img.ndim - 1)\n return GaussianSharpen(sigma1=sigma1, sigma2=sigma2, alpha=self.a, approx=self.approx)(img)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandHistogramShift.randomize_RandHistogramShift.randomize.for_i_in_range_1_num_con.self_floating_control_poi": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandHistogramShift.randomize_RandHistogramShift.randomize.for_i_in_range_1_num_con.self_floating_control_poi", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1037, "end_line": 1045, "span_ids": ["RandHistogramShift.randomize"], "tokens": 141}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandHistogramShift(RandomizableTransform):\n\n def randomize(self, data: Optional[Any] = None) -> None:\n super().randomize(None)\n num_control_point = self.R.randint(self.num_control_points[0], self.num_control_points[1] + 1)\n self.reference_control_points = np.linspace(0, 1, num_control_point)\n self.floating_control_points = np.copy(self.reference_control_points)\n for i in range(1, num_control_point - 1):\n self.floating_control_points[i] = self.R.uniform(\n self.floating_control_points[i - 1], self.floating_control_points[i + 1]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandHistogramShift.__call___": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/array.py_RandHistogramShift.__call___", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 1047, "end_line": 1057, "span_ids": ["RandHistogramShift.__call__"], "tokens": 126}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandHistogramShift(RandomizableTransform):\n\n def __call__(self, img: np.ndarray) -> np.ndarray:\n self.randomize()\n if not self._do_transform:\n return img\n img_min, img_max = img.min(), img.max()\n reference_control_points_scaled = self.reference_control_points * (img_max - img_min) + img_min\n floating_control_points_scaled = self.floating_control_points * (img_max - img_min) + img_min\n return np.asarray(\n np.interp(img, reference_control_points_scaled, floating_control_points_scaled), dtype=img.dtype\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandHistogramShiftd.randomize_RandHistogramShiftd.randomize.for_i_in_range_1_num_con.self_floating_control_poi": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandHistogramShiftd.randomize_RandHistogramShiftd.randomize.for_i_in_range_1_num_con.self_floating_control_poi", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 936, "end_line": 944, "span_ids": ["RandHistogramShiftd.randomize"], "tokens": 146}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandHistogramShiftd(RandomizableTransform, MapTransform):\n\n def randomize(self, data: Optional[Any] = None) -> None:\n super().randomize(None)\n num_control_point = self.R.randint(self.num_control_points[0], self.num_control_points[1] + 1)\n self.reference_control_points = np.linspace(0, 1, num_control_point)\n self.floating_control_points = np.copy(self.reference_control_points)\n for i in range(1, num_control_point - 1):\n self.floating_control_points[i] = self.R.uniform(\n self.floating_control_points[i - 1], self.floating_control_points[i + 1]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandHistogramShiftd.__call___RandHistogramShiftd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/intensity/dictionary.py_RandHistogramShiftd.__call___RandHistogramShiftd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/intensity/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 946, "end_line": 957, "span_ids": ["RandHistogramShiftd.__call__"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class RandHistogramShiftd(RandomizableTransform, MapTransform):\n\n def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n d = dict(data)\n self.randomize()\n if not self._do_transform:\n return d\n for key in self.key_iterator(d):\n img_min, img_max = d[key].min(), d[key].max()\n reference_control_points_scaled = self.reference_control_points * (img_max - img_min) + img_min\n floating_control_points_scaled = self.floating_control_points * (img_max - img_min) + img_min\n dtype = d[key].dtype\n d[key] = np.interp(d[key], reference_control_points_scaled, floating_control_points_scaled).astype(dtype)\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/array.py_LoadImage.__call___LoadImage.__call__.return.img_array_meta_data": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/array.py_LoadImage.__call___LoadImage.__call__.return.img_array_meta_data", "embedding": null, "metadata": {"file_path": "monai/transforms/io/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 127, "end_line": 164, "span_ids": ["LoadImage.__call__"], "tokens": 331}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LoadImage(Transform):\n\n def __call__(\n self,\n filename: Union[Sequence[str], str],\n reader: Optional[ImageReader] = None,\n ):\n \"\"\"\n Args:\n filename: path file or file-like object or a list of files.\n will save the filename to meta_data with key `filename_or_obj`.\n if provided a list of files, use the filename of first file.\n reader: runtime reader to load image file and meta data.\n\n \"\"\"\n if reader is None or not reader.verify_suffix(filename):\n for r in reversed(self.readers):\n if r.verify_suffix(filename):\n reader = r\n break\n\n if reader is None:\n raise RuntimeError(\n f\"can not find suitable reader for this file: {filename}. \\\n Please install dependency libraries: (nii, nii.gz) -> Nibabel, (png, jpg, bmp) -> PIL, \\\n (npz, npy) -> Numpy, others -> ITK. Refer to the installation instruction: \\\n https://docs.monai.io/en/latest/installation.html#installing-the-recommended-dependencies.\"\n )\n\n img = reader.read(filename)\n img_array, meta_data = reader.get_data(img)\n img_array = img_array.astype(self.dtype)\n\n if self.image_only:\n return img_array\n meta_data[Key.FILENAME_OR_OBJ] = ensure_tuple(filename)[0]\n # make sure all elements in metadata are little endian\n meta_data = switch_endianness(meta_data, \">\", \"<\")\n\n return img_array, meta_data", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/dictionary.py_LoadImaged.__call___LoadImaged.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/io/dictionary.py_LoadImaged.__call___LoadImaged.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/io/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 97, "end_line": 120, "span_ids": ["LoadImaged.__call__"], "tokens": 247}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LoadImaged(MapTransform):\n\n def __call__(self, data, reader: Optional[ImageReader] = None):\n \"\"\"\n Raises:\n KeyError: When not ``self.overwriting`` and key already exists in ``data``.\n\n \"\"\"\n d = dict(data)\n for key in self.key_iterator(d):\n data = self._loader(d[key], reader)\n if self._loader.image_only:\n if not isinstance(data, np.ndarray):\n raise ValueError(\"loader must return a numpy array (because image_only=True was used).\")\n d[key] = data\n else:\n if not isinstance(data, (tuple, list)):\n raise ValueError(\"loader must return a tuple or list (because image_only=False was used).\")\n d[key] = data[0]\n if not isinstance(data[1], dict):\n raise ValueError(\"metadata must be a dict.\")\n key_to_add = f\"{key}_{self.meta_key_postfix}\"\n if key_to_add in d and not self.overwriting:\n raise 
KeyError(f\"Meta data with key {key_to_add} already exists and overwriting=False.\")\n d[key_to_add] = data[1]\n return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_LabelToMask.__call___LabelToMask.__call__.return.np_any_data_axis_0_keep": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_LabelToMask.__call___LabelToMask.__call__.return.np_any_data_axis_0_keep", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 579, "end_line": 600, "span_ids": ["LabelToMask.__call__"], "tokens": 239}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class LabelToMask(Transform):\n\n def __call__(\n self, img: np.ndarray, select_labels: Optional[Union[Sequence[int], int]] = None, merge_channels: bool = False\n ):\n \"\"\"\n Args:\n select_labels: labels to generate mask from. for 1 channel label, the `select_labels`\n is the expected label values, like: [1, 2, 3]. for One-Hot format label, the\n `select_labels` is the expected channel indices.\n merge_channels: whether to use `np.any()` to merge the result on channel dim. 
if yes,\n will return a single channel mask with binary data.\n \"\"\"\n if select_labels is None:\n select_labels = self.select_labels\n else:\n select_labels = ensure_tuple(select_labels)\n\n if img.shape[0] > 1:\n data = img[[*select_labels]]\n else:\n data = np.where(np.in1d(img, select_labels), True, False).reshape(img.shape)\n\n return np.any(data, axis=0, keepdims=True) if (merge_channels or self.merge_channels) else data", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_FgBgToIndices_FgBgToIndices.__init__.self.output_shape.output_shape": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/array.py_FgBgToIndices_FgBgToIndices.__init__.self.output_shape.output_shape", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/array.py", "file_name": "array.py", "file_type": "text/x-python", "category": "implementation", "start_line": 459, "end_line": 475, "span_ids": ["FgBgToIndices", "FgBgToIndices.__init__"], "tokens": 201}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FgBgToIndices(Transform):\n def __init__(self, image_threshold: float = 0.0, output_shape: Optional[Sequence[int]] = None) -> None:\n \"\"\"\n Compute foreground and background of the input label data, return the indices.\n If no output_shape specified, output data will be 1 dim indices after flattening.\n This transform can help pre-compute foreground and background regions for other transforms.\n A typical usage is to randomly select foreground and background to crop.\n The main logic is based on :py:class:`monai.transforms.utils.map_binary_to_indices`.\n\n Args:\n image_threshold: if enabled `image` at runtime, use ``image > image_threshold`` to\n determine the valid image content area and select background only in this area.\n output_shape: expected shape of output indices. 
if not None, unravel indices to specified shape.\n\n \"\"\"\n self.image_threshold = image_threshold\n self.output_shape = output_shape", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_FgBgToIndicesd_FgBgToIndicesd.__call__.return.d": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utility/dictionary.py_FgBgToIndicesd_FgBgToIndicesd.__call__.return.d", "embedding": null, "metadata": {"file_path": "monai/transforms/utility/dictionary.py", "file_name": "dictionary.py", "file_type": "text/x-python", "category": "implementation", "start_line": 806, "end_line": 848, "span_ids": ["FgBgToIndicesd", "FgBgToIndicesd.__init__", "FgBgToIndicesd.__call__"], "tokens": 492}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class FgBgToIndicesd(MapTransform):\n \"\"\"\n Dictionary-based wrapper of :py:class:`monai.transforms.FgBgToIndices`.\n\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n fg_postfix: postfix to save the computed foreground indices in dict.\n for example, if computed on `label` and `postfix = \"_fg_indices\"`, the key will be `label_fg_indices`.\n bg_postfix: postfix to save the computed background indices in dict.\n for example, if computed on `label` and `postfix = \"_bg_indices\"`, the key will be `label_bg_indices`.\n image_key: if image_key is not None, use ``label == 0 & image > image_threshold`` to determine\n the negative sample(background). so the output items will not map to all the voxels in the label.\n image_threshold: if enabled image_key, use ``image > image_threshold`` to determine\n the valid image content area and select background only in this area.\n output_shape: expected shape of output indices. 
if not None, unravel indices to specified shape.\n        allow_missing_keys: don't raise exception if key is missing.\n\n    \"\"\"\n\n    def __init__(\n        self,\n        keys: KeysCollection,\n        fg_postfix: str = \"_fg_indices\",\n        bg_postfix: str = \"_bg_indices\",\n        image_key: Optional[str] = None,\n        image_threshold: float = 0.0,\n        output_shape: Optional[Sequence[int]] = None,\n        allow_missing_keys: bool = False,\n    ) -> None:\n        super().__init__(keys, allow_missing_keys)\n        self.fg_postfix = fg_postfix\n        self.bg_postfix = bg_postfix\n        self.image_key = image_key\n        self.converter = FgBgToIndices(image_threshold, output_shape)\n\n    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:\n        d = dict(data)\n        image = d[self.image_key] if self.image_key else None\n        for key in self.key_iterator(d):\n            d[str(key) + self.fg_postfix], d[str(key) + self.bg_postfix] = self.converter(d[key], image)\n\n        return d", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_map_binary_to_indices_map_binary_to_indices.return.fg_indices_bg_indices": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_map_binary_to_indices_map_binary_to_indices.return.fg_indices_bg_indices", "embedding": null, "metadata": {"file_path": "monai/transforms/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 182, "end_line": 211, "span_ids": ["map_binary_to_indices"], "tokens": 382}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def map_binary_to_indices(\n    label: np.ndarray,\n    image: Optional[np.ndarray] = None,\n    image_threshold: float = 0.0,\n) -> Tuple[np.ndarray, np.ndarray]:\n    \"\"\"\n    Compute the foreground and background of input label data, return the indices after flattening.\n    For example:\n    ``label = np.array([[[0, 1, 1], [1, 0, 1], [1, 1, 0]]])``\n    ``foreground indices = np.array([1, 2, 3, 5, 6, 7])`` and ``background indices = np.array([0, 4, 8])``\n\n    Args:\n        label: use the label data to get the foreground/background information.\n        image: if image is not None, use ``label = 0 & image > image_threshold``\n            to define background. 
so the output items will not map to all the voxels in the label.\n image_threshold: if enabled `image`, use ``image > image_threshold`` to\n determine the valid image content area and select background only in this area.\n\n \"\"\"\n # Prepare fg/bg indices\n if label.shape[0] > 1:\n label = label[1:] # for One-Hot format data, remove the background channel\n label_flat = np.any(label, axis=0).ravel() # in case label has multiple dimensions\n fg_indices = np.nonzero(label_flat)[0]\n if image is not None:\n img_flat = np.any(image > image_threshold, axis=0).ravel()\n bg_indices = np.nonzero(np.logical_and(img_flat, ~label_flat))[0]\n else:\n bg_indices = np.nonzero(~label_flat)[0]\n return fg_indices, bg_indices", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_generate_pos_neg_label_crop_centers_generate_pos_neg_label_crop_centers.return.centers": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/transforms/utils.py_generate_pos_neg_label_crop_centers_generate_pos_neg_label_crop_centers.return.centers", "embedding": null, "metadata": {"file_path": "monai/transforms/utils.py", "file_name": "utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 305, "end_line": 378, "span_ids": ["generate_pos_neg_label_crop_centers"], "tokens": 758}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def generate_pos_neg_label_crop_centers(\n spatial_size: Union[Sequence[int], int],\n num_samples: int,\n pos_ratio: float,\n label_spatial_shape: Sequence[int],\n fg_indices: np.ndarray,\n bg_indices: np.ndarray,\n rand_state: np.random.RandomState = np.random,\n) -> List[List[np.ndarray]]:\n \"\"\"\n Generate valid sample locations based on the label with option for specifying foreground ratio\n Valid: samples sitting entirely within image, expected input shape: [C, H, W, D] or [C, H, W]\n\n Args:\n spatial_size: spatial size of the ROIs to be sampled.\n num_samples: total sample centers to be generated.\n pos_ratio: ratio of total locations generated that have center being foreground.\n label_spatial_shape: spatial shape of the original label data to unravel selected centers.\n fg_indices: pre-computed foreground indices in 1 dimension.\n bg_indices: pre-computed background indices in 1 dimension.\n rand_state: numpy randomState object to align with other modules.\n\n Raises:\n ValueError: When the proposed roi is larger than the image.\n ValueError: When the foreground and background indices lengths are 0.\n\n \"\"\"\n spatial_size = fall_back_tuple(spatial_size, default=label_spatial_shape)\n if not (np.subtract(label_spatial_shape, spatial_size) >= 0).all():\n raise ValueError(\"The size of the proposed random crop ROI is larger than the image size.\")\n\n # Select subregion to assure valid roi\n valid_start = np.floor_divide(spatial_size, 2)\n # add 1 for random\n valid_end = np.subtract(label_spatial_shape + np.array(1), spatial_size / 
np.array(2)).astype(np.uint16)\n # int generation to have full range on upper side, but subtract unfloored size/2 to prevent rounded range\n # from being too high\n for i in range(len(valid_start)): # need this because np.random.randint does not work with same start and end\n if valid_start[i] == valid_end[i]:\n valid_end[i] += 1\n\n def _correct_centers(\n center_ori: List[np.ndarray], valid_start: np.ndarray, valid_end: np.ndarray\n ) -> List[np.ndarray]:\n for i, c in enumerate(center_ori):\n center_i = c\n if c < valid_start[i]:\n center_i = valid_start[i]\n if c >= valid_end[i]:\n center_i = valid_end[i] - 1\n center_ori[i] = center_i\n return center_ori\n\n centers = []\n fg_indices, bg_indices = np.asarray(fg_indices), np.asarray(bg_indices)\n if fg_indices.size == 0 and bg_indices.size == 0:\n raise ValueError(\"No sampling location available.\")\n\n if fg_indices.size == 0 or bg_indices.size == 0:\n warnings.warn(\n f\"N foreground {len(fg_indices)}, N background {len(bg_indices)},\"\n \"unable to generate class balanced samples.\"\n )\n pos_ratio = 0 if fg_indices.size == 0 else 1\n\n for _ in range(num_samples):\n indices_to_use = fg_indices if rand_state.rand() < pos_ratio else bg_indices\n random_int = rand_state.randint(len(indices_to_use))\n center = np.unravel_index(indices_to_use[random_int], label_spatial_shape)\n # shift center to range of valid centers\n center_ori = list(center)\n centers.append(_correct_centers(center_ori, valid_start, valid_end))\n\n return centers", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_set_determinism_set_determinism.if_seed_is_not_None_.else_restore_the_orig.torch.backends.cudnn.benchmark._flag_cudnn_benchmark": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/misc.py_set_determinism_set_determinism.if_seed_is_not_None_.else_restore_the_orig.torch.backends.cudnn.benchmark._flag_cudnn_benchmark", "embedding": null, "metadata": {"file_path": "monai/utils/misc.py", "file_name": "misc.py", "file_type": "text/x-python", "category": "implementation", "start_line": 188, "end_line": 228, "span_ids": ["set_determinism"], "tokens": 361}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def set_determinism(\n seed: Optional[int] = np.iinfo(np.uint32).max,\n additional_settings: Optional[Union[Sequence[Callable[[int], Any]], Callable[[int], Any]]] = None,\n) -> None:\n \"\"\"\n Set random seed for modules to enable or disable deterministic training.\n\n Args:\n seed: the random seed to use, default is np.iinfo(np.int32).max.\n It is recommended to set a large seed, i.e. a number that has a good balance\n of 0 and 1 bits. 
Avoid having many 0 bits in the seed.\n if set to None, will disable deterministic training.\n additional_settings: additional settings\n that need to set random seed.\n\n \"\"\"\n if seed is None:\n # cast to 32 bit seed for CUDA\n seed_ = torch.default_generator.seed() % (np.iinfo(np.int32).max + 1)\n if not torch.cuda._is_in_bad_fork():\n torch.cuda.manual_seed_all(seed_)\n else:\n seed = int(seed) % MAX_SEED\n torch.manual_seed(seed)\n\n global _seed\n _seed = seed\n random.seed(seed)\n np.random.seed(seed)\n\n if additional_settings is not None:\n additional_settings = ensure_tuple(additional_settings)\n for func in additional_settings:\n func(seed)\n\n if seed is not None:\n torch.backends.cudnn.deterministic = True\n torch.backends.cudnn.benchmark = False\n else: # restore the original flags\n torch.backends.cudnn.deterministic = _flag_deterministic\n torch.backends.cudnn.benchmark = _flag_cudnn_benchmark", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/module.py_load_submodules_load_submodules.return.submodules_err_mod": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/utils/module.py_load_submodules_load_submodules.return.submodules_err_mod", "embedding": null, "metadata": {"file_path": "monai/utils/module.py", "file_name": "module.py", "file_type": "text/x-python", "category": "implementation", "start_line": 64, "end_line": 82, "span_ids": ["load_submodules"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def load_submodules(basemod, load_all: bool = True, exclude_pattern: str = \"(.*[tT]est.*)|(_.*)\"):\n \"\"\"\n Traverse the source of the module structure starting with module `basemod`, loading all packages plus all files if\n `load_all` is True, excluding anything whose name matches `exclude_pattern`.\n \"\"\"\n submodules = []\n err_mod: List[str] = []\n for importer, name, is_pkg in walk_packages(\n basemod.__path__, prefix=basemod.__name__ + \".\", onerror=err_mod.append\n ):\n if (is_pkg or load_all) and name not in sys.modules and match(exclude_pattern, name) is None:\n try:\n mod = import_module(name)\n importer.find_module(name).load_module(name) # type: ignore\n submodules.append(mod)\n except OptionalImportError:\n pass # could not import the optional deps., they are ignored\n\n return submodules, err_mod", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/setup.py_glob_try_.finally_.print_f_BUILD_MONAI_CPP_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/setup.py_glob_try_.finally_.print_f_BUILD_MONAI_CPP_", "embedding": null, "metadata": {"file_path": "setup.py", "file_name": "setup.py", "file_type": "text/x-python", "category": 
"implementation", "start_line": 12, "end_line": 51, "span_ids": ["docstring"], "tokens": 356}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import glob\nimport os\nimport re\nimport sys\nimport warnings\n\nimport pkg_resources\nfrom setuptools import find_packages, setup\n\nimport versioneer\n\n# TODO: debug mode -g -O0, compile test cases\n\nRUN_BUILD = os.getenv(\"BUILD_MONAI\", \"0\") == \"1\"\nFORCE_CUDA = os.getenv(\"FORCE_CUDA\", \"0\") == \"1\" # flag ignored if BUILD_MONAI is False\n\nBUILD_CPP = BUILD_CUDA = False\nTORCH_VERSION = 0\ntry:\n import torch\n\n print(f\"setup.py with torch {torch.__version__}\")\n from torch.utils.cpp_extension import BuildExtension, CppExtension\n\n BUILD_CPP = True\n from torch.utils.cpp_extension import CUDA_HOME, CUDAExtension\n\n BUILD_CUDA = (CUDA_HOME is not None) if torch.cuda.is_available() else FORCE_CUDA\n\n _pt_version = pkg_resources.parse_version(torch.__version__).release # type: ignore[attr-defined]\n if _pt_version is None or len(_pt_version) < 3:\n raise AssertionError(\"unknown torch version\")\n TORCH_VERSION = int(_pt_version[0]) * 10000 + int(_pt_version[1]) * 100 + int(_pt_version[2])\nexcept (ImportError, TypeError, AssertionError, AttributeError) as e:\n warnings.warn(f\"extension build skipped: {e}\")\nfinally:\n if not RUN_BUILD:\n BUILD_CPP = BUILD_CUDA = False\n print(\"Please set environment variable `BUILD_MONAI=1` to enable Cpp/CUDA extension build.\")\n print(f\"BUILD_MONAI_CPP={BUILD_CPP}, BUILD_MONAI_CUDA={BUILD_CUDA}, TORCH_VERSION={TORCH_VERSION}.\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/setup.py_torch_parallel_backend_omp_flags.return._fopenmp_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/setup.py_torch_parallel_backend_omp_flags.return._fopenmp_", "embedding": null, "metadata": {"file_path": "setup.py", "file_name": "setup.py", "file_type": "text/x-python", "category": "implementation", "start_line": 54, "end_line": 82, "span_ids": ["torch_parallel_backend", "omp_flags"], "tokens": 214}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def torch_parallel_backend():\n try:\n match = re.search(\n \"^ATen parallel backend: (?P.*)$\",\n torch._C._parallel_info(),\n re.MULTILINE,\n )\n if match is None:\n return None\n backend = match.group(\"backend\")\n if backend == \"OpenMP\":\n return \"AT_PARALLEL_OPENMP\"\n if backend == \"native thread pool\":\n return \"AT_PARALLEL_NATIVE\"\n if backend == \"native thread pool and TBB\":\n return \"AT_PARALLEL_NATIVE_TBB\"\n except (NameError, AttributeError): # no torch or no binaries\n warnings.warn(\"Could not determine torch parallel_info.\")\n return None\n\n\ndef omp_flags():\n if 
sys.platform == \"win32\":\n return [\"/openmp\"]\n if sys.platform == \"darwin\":\n # https://stackoverflow.com/questions/37362414/\n # return [\"-fopenmp=libiomp5\"]\n return []\n return [\"-fopenmp\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/setup.py_get_extensions_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/setup.py_get_extensions_", "embedding": null, "metadata": {"file_path": "setup.py", "file_name": "setup.py", "file_type": "text/x-python", "category": "implementation", "start_line": 85, "end_line": 145, "span_ids": ["get_cmds", "get_extensions", "impl:31"], "tokens": 438}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_extensions():\n this_dir = os.path.dirname(os.path.abspath(__file__))\n ext_dir = os.path.join(this_dir, \"monai\", \"csrc\")\n include_dirs = [ext_dir]\n\n source_cpu = glob.glob(os.path.join(ext_dir, \"**\", \"*.cpp\"), recursive=True)\n source_cuda = glob.glob(os.path.join(ext_dir, \"**\", \"*.cu\"), recursive=True)\n\n extension = None\n define_macros = [(f\"{torch_parallel_backend()}\", 1), (\"MONAI_TORCH_VERSION\", TORCH_VERSION)]\n extra_compile_args = {}\n extra_link_args = []\n sources = source_cpu\n if BUILD_CPP:\n extension = CppExtension\n extra_compile_args.setdefault(\"cxx\", [])\n if torch_parallel_backend() == \"AT_PARALLEL_OPENMP\":\n extra_compile_args[\"cxx\"] += omp_flags()\n extra_link_args = omp_flags()\n if BUILD_CUDA:\n extension = CUDAExtension\n sources += source_cuda\n define_macros += [(\"WITH_CUDA\", None)]\n extra_compile_args = {\"cxx\": [], \"nvcc\": []}\n if torch_parallel_backend() == \"AT_PARALLEL_OPENMP\":\n extra_compile_args[\"cxx\"] += omp_flags()\n if extension is None or not sources:\n return [] # compile nothing\n\n ext_modules = [\n extension(\n name=\"monai._C\",\n sources=sources,\n include_dirs=include_dirs,\n define_macros=define_macros,\n extra_compile_args=extra_compile_args,\n extra_link_args=extra_link_args,\n )\n ]\n return ext_modules\n\n\ndef get_cmds():\n cmds = versioneer.get_cmdclass()\n\n if not (BUILD_CPP or BUILD_CUDA):\n return cmds\n\n cmds.update({\"build_ext\": BuildExtension.with_options(no_python_abi_suffix=True)})\n return cmds\n\n\nsetup(\n version=versioneer.get_version(),\n cmdclass=get_cmds(),\n packages=find_packages(exclude=(\"docs\", \"examples\", \"tests\")),\n zip_safe=False,\n package_data={\"monai\": [\"py.typed\"]},\n ext_modules=get_extensions(),\n)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/clang_format_utils.py_os_CLANG_FORMAT_PATH.os_path_join_CLANG_FORMAT": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/clang_format_utils.py_os_CLANG_FORMAT_PATH.os_path_join_CLANG_FORMAT", "embedding": null, "metadata": {"file_path": "tests/clang_format_utils.py", "file_name": "clang_format_utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 15, "end_line": 43, "span_ids": ["docstring"], "tokens": 323}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport platform\nimport stat\nimport sys\n\nfrom monai.apps.utils import download_url\n\n# String representing the host platform (e.g. Linux, Darwin).\nHOST_PLATFORM = platform.system()\n\n# MONAI directory root, derived from the location of this file.\nMONAI_ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))\n\n# This dictionary maps each platform to the S3 object URL for its clang-format binary.\nPLATFORM_TO_CF_URL = {\n \"Darwin\": \"https://oss-clang-format.s3.us-east-2.amazonaws.com/mac/clang-format-mojave\",\n \"Linux\": \"https://oss-clang-format.s3.us-east-2.amazonaws.com/linux64/clang-format-linux64\",\n}\n\n# This dictionary maps each platform to a relative path to a file containing its reference hash.\n# github/pytorch/pytorch/tree/63d62d3e44a0a4ec09d94f30381d49b78cc5b095/tools/clang_format_hash\nPLATFORM_TO_HASH = {\n \"Darwin\": \"b24cc8972344c4e01afbbae78d6a414f7638ff6f\",\n \"Linux\": \"9073602de1c4e1748f2feea5a0782417b20e3043\",\n}\n\n# Directory and file paths for the clang-format binary.\nCLANG_FORMAT_DIR = os.path.join(MONAI_ROOT, \".clang-format-bin\")\nCLANG_FORMAT_PATH = os.path.join(CLANG_FORMAT_DIR, \"clang-format\")", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/clang_format_utils.py_get_and_check_clang_format_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/clang_format_utils.py_get_and_check_clang_format_", "embedding": null, "metadata": {"file_path": "tests/clang_format_utils.py", "file_name": "clang_format_utils.py", "file_type": "text/x-python", "category": "implementation", "start_line": 46, "end_line": 80, "span_ids": ["get_and_check_clang_format", "impl:13"], "tokens": 285}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def get_and_check_clang_format():\n \"\"\"\n Download a platform-appropriate clang-format binary if one doesn't already exist at the expected location and verify\n that it is the right binary by checking its SHA1 hash against the expected hash.\n \"\"\"\n # If the host platform is not in PLATFORM_TO_HASH, it is unsupported.\n if HOST_PLATFORM not in PLATFORM_TO_HASH:\n print(\"Unsupported platform: {}\".format(HOST_PLATFORM))\n return False\n if HOST_PLATFORM not in 
PLATFORM_TO_CF_URL:\n print(\"Unsupported platform: {}\".format(HOST_PLATFORM))\n return False\n\n try:\n download_url(\n PLATFORM_TO_CF_URL[HOST_PLATFORM], CLANG_FORMAT_PATH, PLATFORM_TO_HASH[HOST_PLATFORM], hash_type=\"sha1\"\n )\n except Exception as e:\n print(f\"Download {CLANG_FORMAT_PATH} failed: {e}\")\n print(f\"Please remove {CLANG_FORMAT_PATH} and retry.\")\n return False\n\n # Make sure the binary is executable.\n mode = os.stat(CLANG_FORMAT_PATH).st_mode\n mode |= stat.S_IXUSR\n os.chmod(CLANG_FORMAT_PATH, mode)\n print(\"Using clang-format located at {}\".format(CLANG_FORMAT_PATH))\n\n return True\n\n\nif __name__ == \"__main__\":\n ok = get_and_check_clang_format()\n sys.exit(int(not ok))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/min_tests.py_if___name_____main____": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/min_tests.py_if___name_____main____", "embedding": null, "metadata": {"file_path": "tests/min_tests.py", "file_name": "min_tests.py", "file_type": "text/x-python", "category": "implementation", "start_line": 141, "end_line": 156, "span_ids": ["impl"], "tokens": 125}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "if __name__ == \"__main__\":\n\n # testing import submodules\n from monai.utils.module import load_submodules\n\n _, err_mod = load_submodules(sys.modules[\"monai\"], True)\n if err_mod:\n print(err_mod)\n # expecting that only engines and handlers are not imported\n assert sorted(err_mod) == [\"monai.engines\", \"monai.handlers\"]\n\n # testing all modules\n test_runner = unittest.TextTestRunner(stream=sys.stdout, verbosity=2)\n result = test_runner.run(run_testsuit())\n sys.exit(int(not result.wasSuccessful()))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_check_hash.py_os_TEST_CASE_5._b4dc3c246b298eae37cefdf": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_check_hash.py_os_TEST_CASE_5._b4dc3c246b298eae37cefdf", "embedding": null, "metadata": {"file_path": "tests/test_check_hash.py", "file_name": "test_check_hash.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 29, "span_ids": ["docstring"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport tempfile\nimport unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.apps import 
check_hash\n\nTEST_CASE_1 = [\"b94716452086a054208395e8c9d1ae2a\", \"md5\", True]\n\nTEST_CASE_2 = [\"abcdefg\", \"md5\", False]\n\nTEST_CASE_3 = [None, \"md5\", True]\n\nTEST_CASE_4 = [None, \"sha1\", True]\n\nTEST_CASE_5 = [\"b4dc3c246b298eae37cefdfdd2a50b091ffd5e69\", \"sha1\", True]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_check_hash.py_TestCheckMD5_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_check_hash.py_TestCheckMD5_", "embedding": null, "metadata": {"file_path": "tests/test_check_hash.py", "file_name": "test_check_hash.py", "file_type": "text/x-python", "category": "test", "start_line": 32, "end_line": 51, "span_ids": ["TestCheckMD5", "impl:11", "TestCheckMD5.test_result", "TestCheckMD5.test_hash_type_error"], "tokens": 178}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCheckMD5(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5])\n def test_result(self, md5_value, t, expected_result):\n test_image = np.ones((5, 5, 3))\n with tempfile.TemporaryDirectory() as tempdir:\n filename = os.path.join(tempdir, \"test_file.png\")\n test_image.tofile(filename)\n\n result = check_hash(filename, md5_value, hash_type=t)\n self.assertTrue(result == expected_result)\n\n def test_hash_type_error(self):\n with self.assertRaises(NotImplementedError):\n with tempfile.TemporaryDirectory() as tempdir:\n check_hash(tempdir, \"test_hash\", \"test_type\")\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_sys_TestCompose.test_dict_compose.self_assertDictEqual_c_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_sys_TestCompose.test_dict_compose.self_assertDictEqual_c_", "embedding": null, "metadata": {"file_path": "tests/test_compose.py", "file_name": "test_compose.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 58, "span_ids": ["TestCompose", "_RandXform.randomize", "TestCompose.test_empty_compose", "_RandXform", "TestCompose.test_non_dict_compose", "docstring", "TestCompose.test_dict_compose", "_RandXform.__call__"], "tokens": 281}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\nimport unittest\n\nfrom monai.data import DataLoader, Dataset\nfrom monai.transforms import AddChannel, 
Compose\nfrom monai.transforms.transform import Randomizable\nfrom monai.utils import set_determinism\n\n\nclass _RandXform(Randomizable):\n def randomize(self):\n self.val = self.R.random_sample()\n\n def __call__(self, __unused):\n self.randomize()\n return self.val\n\n\nclass TestCompose(unittest.TestCase):\n def test_empty_compose(self):\n c = Compose()\n i = 1\n self.assertEqual(c(i), 1)\n\n def test_non_dict_compose(self):\n def a(i):\n return i + \"a\"\n\n def b(i):\n return i + \"b\"\n\n c = Compose([a, b, a, b])\n self.assertEqual(c(\"\"), \"abab\")\n\n def test_dict_compose(self):\n def a(d):\n d = dict(d)\n d[\"a\"] += 1\n return d\n\n def b(d):\n d = dict(d)\n d[\"b\"] += 1\n return d\n\n c = Compose([a, b, a, b, a])\n self.assertDictEqual(c({\"a\": 0, \"b\": 0}), {\"a\": 3, \"b\": 2})", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_TestCompose.test_random_compose_TestCompose.test_err_msg.with_self_assertRaisesReg.transforms_42_1_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_TestCompose.test_random_compose_TestCompose.test_err_msg.with_self_assertRaisesReg.transforms_42_1_", "embedding": null, "metadata": {"file_path": "tests/test_compose.py", "file_name": "test_compose.py", "file_type": "text/x-python", "category": "test", "start_line": 82, "end_line": 116, "span_ids": ["TestCompose.test_err_msg", "TestCompose.test_randomize_warn", "TestCompose.test_random_compose"], "tokens": 250}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCompose(unittest.TestCase):\n\n def test_random_compose(self):\n class _Acc(Randomizable):\n self.rand = 0.0\n\n def randomize(self, data=None):\n self.rand = self.R.rand()\n\n def __call__(self, data):\n self.randomize()\n return self.rand + data\n\n c = Compose([_Acc(), _Acc()])\n self.assertNotAlmostEqual(c(0), c(0))\n c.set_random_state(123)\n self.assertAlmostEqual(c(1), 1.61381597)\n c.set_random_state(223)\n c.randomize()\n self.assertAlmostEqual(c(1), 1.90734751)\n\n def test_randomize_warn(self):\n class _RandomClass(Randomizable):\n def randomize(self, foo1, foo2):\n pass\n\n def __call__(self, data):\n pass\n\n c = Compose([_RandomClass(), _RandomClass()])\n with self.assertWarns(Warning):\n c.randomize()\n\n def test_err_msg(self):\n transforms = Compose([abs, AddChannel(), round])\n with self.assertRaisesRegex(Exception, \"AddChannel\"):\n transforms(42.1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_TestCompose.test_data_loader_TestCompose.test_data_loader.set_determinism_None_": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_compose.py_TestCompose.test_data_loader_TestCompose.test_data_loader.set_determinism_None_", "embedding": null, "metadata": {"file_path": "tests/test_compose.py", "file_name": "test_compose.py", "file_type": "text/x-python", "category": "test", "start_line": 114, "end_line": 135, "span_ids": ["TestCompose.test_data_loader"], "tokens": 231}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestCompose(unittest.TestCase):\n\n def test_data_loader(self):\n xform_1 = Compose([_RandXform()])\n train_ds = Dataset([1], transform=xform_1)\n\n xform_1.set_random_state(123)\n out_1 = train_ds[0]\n self.assertAlmostEqual(out_1, 0.2045649)\n\n set_determinism(seed=123)\n train_loader = DataLoader(train_ds, num_workers=0)\n out_1 = next(iter(train_loader))\n self.assertAlmostEqual(out_1.cpu().item(), 0.84291356)\n\n if sys.platform != \"win32\": # skip multi-worker tests on win32\n train_loader = DataLoader(train_ds, num_workers=1)\n out_1 = next(iter(train_loader))\n self.assertAlmostEqual(out_1.cpu().item(), 0.180814653)\n\n train_loader = DataLoader(train_ds, num_workers=2)\n out_1 = next(iter(train_loader))\n self.assertAlmostEqual(out_1.cpu().item(), 0.04293707)\n set_determinism(None)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_unittest_TestConvolution2D.test_transpose2.self_assertEqual_out_shap": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_unittest_TestConvolution2D.test_transpose2.self_assertEqual_out_shap", "embedding": null, "metadata": {"file_path": "tests/test_convolutions.py", "file_name": "test_convolutions.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 66, "span_ids": ["TestConvolution2D.test_conv1", "TestConvolution2D.test_conv_only1", "TestConvolution2D.test_transpose2", "TestConvolution2D.test_dilation1", "TestConvolution2D.test_dropout1", "TestConvolution2D", "TestConvolution2D.test_transpose1", "docstring", "TestConvolution2D.test_stride1", "TestConvolution2D.test_conv1_no_acti"], "tokens": 628}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nfrom monai.networks.blocks import Convolution, ResidualUnit\nfrom tests.utils import TorchImageTestCase2D, TorchImageTestCase3D\n\n\nclass TestConvolution2D(TorchImageTestCase2D):\n def test_conv1(self):\n conv = Convolution(2, self.input_channels, self.output_channels)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[0], self.im_shape[1])\n self.assertEqual(out.shape, expected_shape)\n\n def test_conv1_no_acti(self):\n 
conv = Convolution(2, self.input_channels, self.output_channels, act=None)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[0], self.im_shape[1])\n self.assertEqual(out.shape, expected_shape)\n\n def test_conv_only1(self):\n conv = Convolution(2, self.input_channels, self.output_channels, conv_only=True)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[0], self.im_shape[1])\n self.assertEqual(out.shape, expected_shape)\n\n def test_stride1(self):\n for strides in [2, [2, 2], (2, 2)]:\n conv = Convolution(2, self.input_channels, self.output_channels, strides=strides)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[0] // 2, self.im_shape[1] // 2)\n self.assertEqual(out.shape, expected_shape)\n\n def test_dilation1(self):\n conv = Convolution(2, self.input_channels, self.output_channels, dilation=3)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[0], self.im_shape[1])\n self.assertEqual(out.shape, expected_shape)\n\n def test_dropout1(self):\n conv = Convolution(2, self.input_channels, self.output_channels, dropout=0.15)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[0], self.im_shape[1])\n self.assertEqual(out.shape, expected_shape)\n\n def test_transpose1(self):\n conv = Convolution(2, self.input_channels, self.output_channels, is_transposed=True)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[0], self.im_shape[1])\n self.assertEqual(out.shape, expected_shape)\n\n def test_transpose2(self):\n conv = Convolution(2, self.input_channels, self.output_channels, strides=2, is_transposed=True)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[0] * 2, self.im_shape[1] * 2)\n self.assertEqual(out.shape, expected_shape)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_TestConvolution3D_TestConvolution3D.test_conv_only1.self_assertEqual_out_shap": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_TestConvolution3D_TestConvolution3D.test_conv_only1.self_assertEqual_out_shap", "embedding": null, "metadata": {"file_path": "tests/test_convolutions.py", "file_name": "test_convolutions.py", "file_type": "text/x-python", "category": "test", "start_line": 69, "end_line": 86, "span_ids": ["TestConvolution3D", "TestConvolution3D.test_conv1_no_acti", "TestConvolution3D.test_conv1", "TestConvolution3D.test_conv_only1"], "tokens": 252}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConvolution3D(TorchImageTestCase3D):\n def test_conv1(self):\n conv = Convolution(3, self.input_channels, self.output_channels, dropout=0.1, adn_ordering=\"DAN\")\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[1], self.im_shape[0], self.im_shape[2])\n self.assertEqual(out.shape, expected_shape)\n\n def 
test_conv1_no_acti(self):\n conv = Convolution(3, self.input_channels, self.output_channels, act=None, adn_ordering=\"AND\")\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[1], self.im_shape[0], self.im_shape[2])\n self.assertEqual(out.shape, expected_shape)\n\n def test_conv_only1(self):\n conv = Convolution(3, self.input_channels, self.output_channels, conv_only=True)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[1], self.im_shape[0], self.im_shape[2])\n self.assertEqual(out.shape, expected_shape)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_TestConvolution3D.test_stride1_TestConvolution3D.test_stride1.for_strides_in_2_2_2_.self_assertEqual_out_shap": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_TestConvolution3D.test_stride1_TestConvolution3D.test_stride1.for_strides_in_2_2_2_.self_assertEqual_out_shap", "embedding": null, "metadata": {"file_path": "tests/test_convolutions.py", "file_name": "test_convolutions.py", "file_type": "text/x-python", "category": "test", "start_line": 88, "end_line": 99, "span_ids": ["TestConvolution3D.test_stride1"], "tokens": 131}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConvolution3D(TorchImageTestCase3D):\n\n def test_stride1(self):\n for strides in [2, (2, 2, 2), [2, 2, 2]]:\n conv = Convolution(3, self.input_channels, self.output_channels, strides=strides)\n out = conv(self.imt)\n expected_shape = (\n 1,\n self.output_channels,\n self.im_shape[1] // 2,\n self.im_shape[0] // 2,\n self.im_shape[2] // 2,\n )\n self.assertEqual(out.shape, expected_shape)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_TestConvolution3D.test_dilation1_TestConvolution3D.test_transpose2.self_assertEqual_out_shap": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_convolutions.py_TestConvolution3D.test_dilation1_TestConvolution3D.test_transpose2.self_assertEqual_out_shap", "embedding": null, "metadata": {"file_path": "tests/test_convolutions.py", "file_name": "test_convolutions.py", "file_type": "text/x-python", "category": "test", "start_line": 101, "end_line": 123, "span_ids": ["TestConvolution3D.test_transpose1", "TestConvolution3D.test_dropout1", "TestConvolution3D.test_dilation1", "TestConvolution3D.test_transpose2"], "tokens": 328}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", 
"last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestConvolution3D(TorchImageTestCase3D):\n\n def test_dilation1(self):\n conv = Convolution(3, self.input_channels, self.output_channels, dilation=3)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[1], self.im_shape[0], self.im_shape[2])\n self.assertEqual(out.shape, expected_shape)\n\n def test_dropout1(self):\n conv = Convolution(3, self.input_channels, self.output_channels, dropout=0.15)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[1], self.im_shape[0], self.im_shape[2])\n self.assertEqual(out.shape, expected_shape)\n\n def test_transpose1(self):\n conv = Convolution(3, self.input_channels, self.output_channels, is_transposed=True)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[1], self.im_shape[0], self.im_shape[2])\n self.assertEqual(out.shape, expected_shape)\n\n def test_transpose2(self):\n conv = Convolution(3, self.input_channels, self.output_channels, strides=2, is_transposed=True)\n out = conv(self.imt)\n expected_shape = (1, self.output_channels, self.im_shape[1] * 2, self.im_shape[0] * 2, self.im_shape[2] * 2)\n self.assertEqual(out.shape, expected_shape)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crop_foreground.py_unittest_TEST_CASE_4._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_crop_foreground.py_unittest_TEST_CASE_4._", "embedding": null, "metadata": {"file_path": "tests/test_crop_foreground.py", "file_name": "test_crop_foreground.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 41, "span_ids": ["impl:7", "docstring"], "tokens": 609}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import CropForeground\n\nTEST_CASE_1 = [\n {\"select_fn\": lambda x: x > 0, \"channel_indices\": None, \"margin\": 0},\n np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 1, 2, 1, 0], [0, 0, 0, 0, 0]]]),\n np.array([[[1, 2, 1], [2, 3, 2], [1, 2, 1]]]),\n]\n\nTEST_CASE_2 = [\n {\"select_fn\": lambda x: x > 1, \"channel_indices\": None, \"margin\": 0},\n np.array([[[0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 1, 3, 1, 0], [0, 1, 1, 1, 0], [0, 0, 0, 0, 0]]]),\n np.array([[[3]]]),\n]\n\nTEST_CASE_3 = [\n {\"select_fn\": lambda x: x > 0, \"channel_indices\": 0, \"margin\": 0},\n np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 1, 2, 1, 0], [0, 0, 0, 0, 0]]]),\n np.array([[[1, 2, 1], [2, 3, 2], [1, 2, 1]]]),\n]\n\nTEST_CASE_4 = [\n {\"select_fn\": lambda x: x > 0, \"channel_indices\": None, \"margin\": 1},\n np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]]),\n np.array([[[0, 0, 0, 0, 0], [0, 1, 2, 1, 0], [0, 2, 3, 2, 0], [0, 0, 0, 0, 0]]]),\n]", "start_char_idx": null, "end_char_idx": 
null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dynunet.py_unittest_TEST_CASE_DEEP_SUPERVISION._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dynunet.py_unittest_TEST_CASE_DEEP_SUPERVISION._", "embedding": null, "metadata": {"file_path": "tests/test_dynunet.py", "file_name": "test_dynunet.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 77, "span_ids": ["impl:36", "docstring"], "tokens": 594}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\nfrom typing import Any, Sequence, Union\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks import eval_mode\nfrom monai.networks.nets import DynUNet\nfrom tests.utils import test_script_save\n\ndevice = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n\nstrides: Sequence[Union[Sequence[int], int]]\nkernel_size: Sequence[Any]\nexpected_shape: Sequence[Any]\n\nTEST_CASE_DYNUNET_2D = []\nfor kernel_size in [(3, 3, 3, 1), ((3, 1), 1, (3, 3), (1, 1))]:\n for strides in [(1, 1, 1, 1), (2, 2, 2, 1)]:\n for in_channels in [2, 3]:\n for res_block in [True, False]:\n out_channels = 2\n in_size = 64\n spatial_dims = 2\n expected_shape = (1, out_channels, *[in_size // strides[0]] * spatial_dims)\n test_case = [\n {\n \"spatial_dims\": spatial_dims,\n \"in_channels\": in_channels,\n \"out_channels\": out_channels,\n \"kernel_size\": kernel_size,\n \"strides\": strides,\n \"upsample_kernel_size\": strides[1:],\n \"norm_name\": \"batch\",\n \"deep_supervision\": False,\n \"res_block\": res_block,\n },\n (1, in_channels, in_size, in_size),\n expected_shape,\n ]\n TEST_CASE_DYNUNET_2D.append(test_case)\n\nTEST_CASE_DYNUNET_3D = [] # in 3d cases, also test anisotropic kernel/strides\nfor out_channels in [2, 3]:\n for res_block in [True, False]:\n in_channels = 1\n in_size = 64\n expected_shape = (1, out_channels, 64, 32, 64)\n test_case = [\n {\n \"spatial_dims\": 3,\n \"in_channels\": in_channels,\n \"out_channels\": out_channels,\n \"kernel_size\": (3, (1, 1, 3), 3, 3),\n \"strides\": ((1, 2, 1), 2, 2, 1),\n \"upsample_kernel_size\": (2, 2, 1),\n \"norm_name\": \"instance\",\n \"deep_supervision\": False,\n \"res_block\": res_block,\n },\n (1, in_channels, in_size, in_size, in_size),\n expected_shape,\n ]\n TEST_CASE_DYNUNET_3D.append(test_case)\n\nTEST_CASE_DEEP_SUPERVISION = []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dynunet_block.py_unittest_None_1.for_kernel_size_in_1_3_.for_stride_in_1_2_.for_norm_name_in_batch_.for_in_size_in_15_16_.TEST_UP_BLOCK_append_test": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_dynunet_block.py_unittest_None_1.for_kernel_size_in_1_3_.for_stride_in_1_2_.for_norm_name_in_batch_.for_in_size_in_15_16_.TEST_UP_BLOCK_append_test", "embedding": null, "metadata": {"file_path": "tests/test_dynunet_block.py", "file_name": "test_dynunet_block.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 67, "span_ids": ["docstring"], "tokens": 478}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks import eval_mode\nfrom monai.networks.blocks.dynunet_block import UnetBasicBlock, UnetResBlock, UnetUpBlock, get_padding\nfrom tests.utils import test_script_save\n\nTEST_CASE_RES_BASIC_BLOCK = []\nfor spatial_dims in range(2, 4):\n for kernel_size in [1, 3]:\n for stride in [1, 2]:\n for norm_name in [\"group\", \"batch\", \"instance\"]:\n for in_size in [15, 16]:\n padding = get_padding(kernel_size, stride)\n if not isinstance(padding, int):\n padding = padding[0]\n out_size = int((in_size + 2 * padding - kernel_size) / stride) + 1\n test_case = [\n {\n \"spatial_dims\": spatial_dims,\n \"in_channels\": 16,\n \"out_channels\": 16,\n \"kernel_size\": kernel_size,\n \"norm_name\": norm_name,\n \"stride\": stride,\n },\n (1, 16, *([in_size] * spatial_dims)),\n (1, 16, *([out_size] * spatial_dims)),\n ]\n TEST_CASE_RES_BASIC_BLOCK.append(test_case)\n\nTEST_UP_BLOCK = []\nin_channels, out_channels = 4, 2\nfor spatial_dims in range(2, 4):\n for kernel_size in [1, 3]:\n for stride in [1, 2]:\n for norm_name in [\"batch\", \"instance\"]:\n for in_size in [15, 16]:\n out_size = in_size * stride\n test_case = [\n {\n \"spatial_dims\": spatial_dims,\n \"in_channels\": in_channels,\n \"out_channels\": out_channels,\n \"kernel_size\": kernel_size,\n \"norm_name\": norm_name,\n \"stride\": stride,\n \"upsample_kernel_size\": stride,\n },\n (1, in_channels, *([in_size] * spatial_dims)),\n (1, out_channels, *([out_size] * spatial_dims)),\n (1, out_channels, *([in_size * stride] * spatial_dims)),\n ]\n TEST_UP_BLOCK.append(test_case)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fg_bg_to_indices.py_unittest_TEST_CASE_5._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fg_bg_to_indices.py_unittest_TEST_CASE_5._", "embedding": null, "metadata": {"file_path": "tests/test_fg_bg_to_indices.py", "file_name": "test_fg_bg_to_indices.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 57, "span_ids": ["impl:9", "docstring"], "tokens": 615}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import FgBgToIndices\n\nTEST_CASE_1 = [\n {\"image_threshold\": 0.0, \"output_shape\": None},\n np.array([[[0, 1, 1], [1, 0, 1], [1, 1, 0]]]),\n None,\n np.array([1, 2, 3, 5, 6, 7]),\n np.array([0, 4, 8]),\n]\n\nTEST_CASE_2 = [\n {\"image_threshold\": 0.0, \"output_shape\": None},\n np.array([[[0, 1, 1], [1, 0, 1], [1, 1, 0]]]),\n np.array([[[1, 1, 1], [1, 0, 1], [1, 1, 1]]]),\n np.array([1, 2, 3, 5, 6, 7]),\n np.array([0, 8]),\n]\n\nTEST_CASE_3 = [\n {\"image_threshold\": 1.0, \"output_shape\": None},\n np.array([[[0, 1, 1], [1, 0, 1], [1, 1, 0]]]),\n np.array([[[3, 3, 3], [3, 1, 3], [3, 3, 3]]]),\n np.array([1, 2, 3, 5, 6, 7]),\n np.array([0, 8]),\n]\n\nTEST_CASE_4 = [\n {\"image_threshold\": 1.0, \"output_shape\": None},\n np.array([[[0, 1, 2], [3, 0, 4], [5, 6, 0]]]),\n np.array([[[3, 3, 3], [3, 1, 3], [3, 3, 3]]]),\n np.array([1, 2, 3, 5, 6, 7]),\n np.array([0, 8]),\n]\n\nTEST_CASE_5 = [\n {\"image_threshold\": 1.0, \"output_shape\": [3, 3]},\n np.array([[[0, 1, 2], [3, 0, 4], [5, 6, 0]]]),\n np.array([[[3, 3, 3], [3, 1, 3], [3, 3, 3]]]),\n np.array([[0, 1], [0, 2], [1, 0], [1, 2], [2, 0], [2, 1]]),\n np.array([[0, 0], [2, 2]]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fg_bg_to_indices.py_TestFgBgToIndices_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fg_bg_to_indices.py_TestFgBgToIndices_", "embedding": null, "metadata": {"file_path": "tests/test_fg_bg_to_indices.py", "file_name": "test_fg_bg_to_indices.py", "file_type": "text/x-python", "category": "test", "start_line": 60, "end_line": 70, "span_ids": ["TestFgBgToIndices", "impl:11", "TestFgBgToIndices.test_type_shape"], "tokens": 118}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFgBgToIndices(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5])\n def test_type_shape(self, input_data, label, image, expected_fg, expected_bg):\n fg_indices, bg_indices = FgBgToIndices(**input_data)(label, image)\n np.testing.assert_allclose(fg_indices, expected_fg)\n np.testing.assert_allclose(bg_indices, expected_bg)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fg_bg_to_indicesd.py_unittest_TEST_CASE_5._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fg_bg_to_indicesd.py_unittest_TEST_CASE_5._", "embedding": null, "metadata": {"file_path": "tests/test_fg_bg_to_indicesd.py", "file_name": "test_fg_bg_to_indicesd.py", "file_type": 
"text/x-python", "category": "test", "start_line": 12, "end_line": 52, "span_ids": ["impl:7", "docstring"], "tokens": 705}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import FgBgToIndicesd\n\nTEST_CASE_1 = [\n {\"keys\": \"label\", \"image_key\": None, \"image_threshold\": 0.0, \"output_shape\": None},\n {\"label\": np.array([[[0, 1, 1], [1, 0, 1], [1, 1, 0]]])},\n np.array([1, 2, 3, 5, 6, 7]),\n np.array([0, 4, 8]),\n]\n\nTEST_CASE_2 = [\n {\"keys\": \"label\", \"image_key\": \"image\", \"image_threshold\": 0.0, \"output_shape\": None},\n {\"label\": np.array([[[0, 1, 1], [1, 0, 1], [1, 1, 0]]]), \"image\": np.array([[[1, 1, 1], [1, 0, 1], [1, 1, 1]]])},\n np.array([1, 2, 3, 5, 6, 7]),\n np.array([0, 8]),\n]\n\nTEST_CASE_3 = [\n {\"keys\": \"label\", \"image_key\": \"image\", \"image_threshold\": 1.0, \"output_shape\": None},\n {\"label\": np.array([[[0, 1, 1], [1, 0, 1], [1, 1, 0]]]), \"image\": np.array([[[3, 3, 3], [3, 1, 3], [3, 3, 3]]])},\n np.array([1, 2, 3, 5, 6, 7]),\n np.array([0, 8]),\n]\n\nTEST_CASE_4 = [\n {\"keys\": \"label\", \"image_key\": \"image\", \"image_threshold\": 1.0, \"output_shape\": None},\n {\"label\": np.array([[[0, 1, 2], [3, 0, 4], [5, 6, 0]]]), \"image\": np.array([[[3, 3, 3], [3, 1, 3], [3, 3, 3]]])},\n np.array([1, 2, 3, 5, 6, 7]),\n np.array([0, 8]),\n]\n\nTEST_CASE_5 = [\n {\"keys\": \"label\", \"image_key\": \"image\", \"image_threshold\": 1.0, \"output_shape\": [3, 3]},\n {\"label\": np.array([[[0, 1, 2], [3, 0, 4], [5, 6, 0]]]), \"image\": np.array([[[3, 3, 3], [3, 1, 3], [3, 3, 3]]])},\n np.array([[0, 1], [0, 2], [1, 0], [1, 2], [2, 0], [2, 1]]),\n np.array([[0, 0], [2, 2]]),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fg_bg_to_indicesd.py_TestFgBgToIndicesd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_fg_bg_to_indicesd.py_TestFgBgToIndicesd_", "embedding": null, "metadata": {"file_path": "tests/test_fg_bg_to_indicesd.py", "file_name": "test_fg_bg_to_indicesd.py", "file_type": "text/x-python", "category": "test", "start_line": 55, "end_line": 65, "span_ids": ["impl:11", "TestFgBgToIndicesd", "TestFgBgToIndicesd.test_type_shape"], "tokens": 117}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestFgBgToIndicesd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5])\n def test_type_shape(self, input_data, data, expected_fg, expected_bg):\n result = FgBgToIndicesd(**input_data)(data)\n np.testing.assert_allclose(result[\"label_fg_indices\"], expected_fg)\n 
np.testing.assert_allclose(result[\"label_bg_indices\"], expected_bg)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian.py_unittest_TEST_CASES_NORM_F": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian.py_unittest_TEST_CASES_NORM_F", "embedding": null, "metadata": {"file_path": "tests/test_gaussian.py", "file_name": "test_gaussian.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 221, "span_ids": ["docstring"], "tokens": 43}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks.layers.convutils import gaussian_1d\n\nTEST_CASES_NORM_F =\n # ... other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian.py_TestGaussian1d_TestGaussian1d.test_gaussian.None_2": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian.py_TestGaussian1d_TestGaussian1d.test_gaussian.None_2", "embedding": null, "metadata": {"file_path": "tests/test_gaussian.py", "file_name": "test_gaussian.py", "file_type": "text/x-python", "category": "test", "start_line": 224, "end_line": 253, "span_ids": ["TestGaussian1d.test_gaussian", "TestGaussian1d"], "tokens": 248}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGaussian1d(unittest.TestCase):\n def test_gaussian(self):\n np.testing.assert_allclose(\n gaussian_1d(0.5, 8),\n torch.tensor(\n [\n 0.0000e00,\n 2.9802e-07,\n 1.3496e-03,\n 1.5731e-01,\n 6.8269e-01,\n 1.5731e-01,\n 1.3496e-03,\n 2.9802e-07,\n 0.0000e00,\n ]\n ),\n rtol=1e-4,\n )\n\n np.testing.assert_allclose(\n gaussian_1d(1, 1),\n torch.tensor([0.24173, 0.382925, 0.24173]),\n rtol=1e-4,\n )\n np.testing.assert_allclose(\n gaussian_1d(1, 1, normalize=True),\n torch.tensor([0.2790, 0.4420, 0.2790]),\n rtol=1e-4,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian.py_TestGaussian1d.test_scalespace_gaussian_TestGaussian1d.test_scalespace_gaussian.None_3": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian.py_TestGaussian1d.test_scalespace_gaussian_TestGaussian1d.test_scalespace_gaussian.None_3", "embedding": null, "metadata": {"file_path": "tests/test_gaussian.py", "file_name": "test_gaussian.py", "file_type": "text/x-python", "category": "test", "start_line": 255, "end_line": 304, "span_ids": ["TestGaussian1d.test_scalespace_gaussian"], "tokens": 389}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGaussian1d(unittest.TestCase):\n\n def test_scalespace_gaussian(self):\n np.testing.assert_allclose(\n gaussian_1d(0.5, 8, \"scalespace\"),\n torch.tensor(\n [\n 7.9472e-06,\n 2.5451e-04,\n 6.1161e-03,\n 9.8113e-02,\n 7.9102e-01,\n 9.8113e-02,\n 6.1161e-03,\n 2.5451e-04,\n 7.9472e-06,\n ]\n ),\n rtol=1e-4,\n )\n\n np.testing.assert_allclose(\n gaussian_1d(1, 1, \"scalespace\"),\n torch.tensor([0.20791, 0.46576, 0.20791]),\n rtol=1e-3,\n )\n\n np.testing.assert_allclose(\n gaussian_1d(1, 1, \"scalespace\", normalize=True),\n torch.tensor([0.2358, 0.5283, 0.2358]),\n rtol=1e-3,\n )\n\n np.testing.assert_allclose(\n gaussian_1d(5, 1, \"scalespace\"),\n torch.tensor(\n [\n 0.048225,\n 0.057891,\n 0.06675,\n 0.073911,\n 0.078576,\n 0.080197,\n 0.078576,\n 0.073911,\n 0.06675,\n 0.057891,\n 0.048225,\n ]\n ),\n rtol=1e-3,\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian.py_TestGaussian1d.test_norm_false_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian.py_TestGaussian1d.test_norm_false_", "embedding": null, "metadata": {"file_path": "tests/test_gaussian.py", "file_name": "test_gaussian.py", "file_type": "text/x-python", "category": "test", "start_line": 306, "end_line": 327, "span_ids": ["TestGaussian1d.test_norm_false", "impl:3", "TestGaussian1d.test_wrong_sigma"], "tokens": 250}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestGaussian1d(unittest.TestCase):\n\n @parameterized.expand(TEST_CASES_NORM_F)\n def test_norm_false(self, variance, expected):\n extent = 6\n atol = 1e-4\n sigma = np.sqrt(variance)\n k_erf = gaussian_1d(sigma, truncated=extent / sigma, approx=\"erf\", normalize=False).numpy()\n k_sampled = gaussian_1d(sigma, truncated=extent / sigma, approx=\"sampled\").numpy()\n k_scalespace = gaussian_1d(sigma, truncated=extent / sigma, approx=\"scalespace\").numpy()\n np.testing.assert_allclose(k_erf, expected[0], atol=atol)\n np.testing.assert_allclose(k_sampled, expected[1], atol=atol)\n np.testing.assert_allclose(k_scalespace, expected[2], atol=atol)\n\n def test_wrong_sigma(self):\n with self.assertRaises(ValueError):\n gaussian_1d(1, -10)\n with 
self.assertRaises(NotImplementedError):\n gaussian_1d(1, 10, \"wrong_arg\")\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_GaussianFilterTestCase_GaussianFilterTestCase.test_1d.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_gaussian_filter.py_GaussianFilterTestCase_GaussianFilterTestCase.test_1d.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_gaussian_filter.py", "file_name": "test_gaussian_filter.py", "file_type": "text/x-python", "category": "test", "start_line": 86, "end_line": 109, "span_ids": ["GaussianFilterTestCase.test_1d", "GaussianFilterTestCase"], "tokens": 190}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class GaussianFilterTestCase(unittest.TestCase):\n def test_1d(self):\n a = torch.ones(1, 8, 10)\n g = GaussianFilter(1, 3, 3).to(torch.device(\"cpu:0\"))\n expected = np.array(\n [\n [\n [\n 0.5654129,\n 0.68915915,\n 0.79146194,\n 0.8631974,\n 0.8998163,\n 0.8998163,\n 0.8631973,\n 0.79146194,\n 0.6891592,\n 0.5654129,\n ]\n ]\n ]\n )\n expected = np.tile(expected, (1, 8, 1))\n np.testing.assert_allclose(g(a).cpu().numpy(), expected, rtol=1e-5)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_smartcache.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_handler_smartcache.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_handler_smartcache.py", "file_name": "test_handler_smartcache.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 48, "span_ids": ["TestHandlerSmartCache.test_content", "TestHandlerSmartCache", "impl", "docstring"], "tokens": 281}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom ignite.engine import Engine\n\nfrom monai.data import SmartCacheDataset\nfrom monai.handlers import SmartCacheHandler\n\n\nclass TestHandlerSmartCache(unittest.TestCase):\n def test_content(self):\n data = [0, 1, 2, 3, 4, 5, 6, 7, 8]\n expected = [\n [0, 1, 2, 3, 4],\n [1, 2, 3, 4, 5],\n [2, 3, 4, 5, 6],\n [3, 4, 5, 6, 7],\n [4, 5, 6, 7, 8],\n ]\n\n # set up engine\n def _train_func(engine, batch):\n self.assertListEqual(batch.tolist(), expected[engine.state.epoch - 1])\n\n engine = Engine(_train_func)\n\n # set up 
testing handler\n dataset = SmartCacheDataset(data, transform=None, replace_rate=0.2, cache_num=5, shuffle=False)\n data_loader = torch.utils.data.DataLoader(dataset, batch_size=5)\n SmartCacheHandler(dataset).attach(engine)\n\n engine.run(data_loader, max_epochs=5)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hausdorff_distance.py_unittest_create_spherical_seg_3d.return.image": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_hausdorff_distance.py_unittest_create_spherical_seg_3d.return.image", "embedding": null, "metadata": {"file_path": "tests/test_hausdorff_distance.py", "file_name": "test_hausdorff_distance.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 48, "span_ids": ["create_spherical_seg_3d", "docstring"], "tokens": 321}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\nfrom typing import Tuple\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.metrics import HausdorffDistanceMetric\n\n\ndef create_spherical_seg_3d(\n radius: float = 20.0,\n centre: Tuple[int, int, int] = (49, 49, 49),\n im_shape: Tuple[int, int, int] = (99, 99, 99),\n) -> np.ndarray:\n \"\"\"\n Return a 3D image with a sphere inside. 
Voxel values will be\n 1 inside the sphere, and 0 elsewhere.\n\n Args:\n radius: radius of sphere (in terms of number of voxels, can be partial)\n centre: location of sphere centre.\n im_shape: shape of image to create\n\n See also:\n :py:meth:`~create_test_image_3d`\n \"\"\"\n # Create image\n image = np.zeros(im_shape, dtype=np.int32)\n spy, spx, spz = np.ogrid[\n -centre[0] : im_shape[0] - centre[0], -centre[1] : im_shape[1] - centre[1], -centre[2] : im_shape[2] - centre[2]\n ]\n circle = (spx * spx + spy * spy + spz * spz) <= radius * radius\n\n image[circle] = 1\n image[~circle] = 0\n return image", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_is_supported_format.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_is_supported_format.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_is_supported_format.py", "file_name": "test_is_supported_format.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 57, "span_ids": ["impl:13", "TestIsSupportedFormat.test_value", "TestIsSupportedFormat", "docstring"], "tokens": 319}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nfrom parameterized import parameterized\n\nfrom monai.data import is_supported_format\n\nTEST_CASE_1 = [\n {\"filename\": \"testfile.nii.gz\", \"suffixes\": [\"nii\", \"nii.gz\"]},\n True,\n]\n\nTEST_CASE_2 = [\n {\"filename\": \"./testfile.nii.gz\", \"suffixes\": [\"nii\", \"nii.gz\"]},\n True,\n]\n\nTEST_CASE_3 = [\n {\"filename\": \"./test.data/file.nii.gz\", \"suffixes\": [\"nii\", \"nii.gz\"]},\n True,\n]\n\nTEST_CASE_4 = [\n {\"filename\": \"./test.data/file.nii\", \"suffixes\": [\"nii\", \"nii.gz\"]},\n True,\n]\n\nTEST_CASE_5 = [\n {\"filename\": \"C:\\\\documents\\\\testfile.nii.gz\", \"suffixes\": [\"nii\", \"nii.gz\"]},\n True,\n]\n\nTEST_CASE_6 = [\n {\"filename\": \"1.3.12.2.1107.5.4.4.145.nii.gz\", \"suffixes\": [\"nii.gz\"]},\n True,\n]\n\n\nclass TestIsSupportedFormat(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5, TEST_CASE_6])\n def test_value(self, input_param, result):\n self.assertEqual(is_supported_format(**input_param), result)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_decathlon_datalist.py_json_TestLoadDecathlonDatalist.test_seg_values.with_tempfile_TemporaryDi.None_1": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_decathlon_datalist.py_json_TestLoadDecathlonDatalist.test_seg_values.with_tempfile_TemporaryDi.None_1", "embedding": null, "metadata": 
{"file_path": "tests/test_load_decathlon_datalist.py", "file_name": "test_load_decathlon_datalist.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 39, "span_ids": ["TestLoadDecathlonDatalist", "TestLoadDecathlonDatalist.test_seg_values", "docstring"], "tokens": 289}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import json\nimport os\nimport tempfile\nimport unittest\n\nfrom monai.data import load_decathlon_datalist\n\n\nclass TestLoadDecathlonDatalist(unittest.TestCase):\n def test_seg_values(self):\n with tempfile.TemporaryDirectory() as tempdir:\n test_data = {\n \"name\": \"Spleen\",\n \"description\": \"Spleen Segmentation\",\n \"labels\": {\"0\": \"background\", \"1\": \"spleen\"},\n \"training\": [\n {\"image\": \"spleen_19.nii.gz\", \"label\": \"spleen_19.nii.gz\"},\n {\"image\": \"spleen_31.nii.gz\", \"label\": \"spleen_31.nii.gz\"},\n ],\n \"test\": [\"spleen_15.nii.gz\", \"spleen_23.nii.gz\"],\n }\n json_str = json.dumps(test_data)\n file_path = os.path.join(tempdir, \"test_data.json\")\n with open(file_path, \"w\") as json_file:\n json_file.write(json_str)\n result = load_decathlon_datalist(file_path, True, \"training\", tempdir)\n self.assertEqual(result[0][\"image\"], os.path.join(tempdir, \"spleen_19.nii.gz\"))\n self.assertEqual(result[0][\"label\"], os.path.join(tempdir, \"spleen_19.nii.gz\"))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_decathlon_datalist.py_TestLoadDecathlonDatalist.test_cls_values_TestLoadDecathlonDatalist.test_cls_values.with_tempfile_TemporaryDi.None_1": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_decathlon_datalist.py_TestLoadDecathlonDatalist.test_cls_values_TestLoadDecathlonDatalist.test_cls_values.with_tempfile_TemporaryDi.None_1", "embedding": null, "metadata": {"file_path": "tests/test_load_decathlon_datalist.py", "file_name": "test_load_decathlon_datalist.py", "file_type": "text/x-python", "category": "test", "start_line": 41, "end_line": 56, "span_ids": ["TestLoadDecathlonDatalist.test_cls_values"], "tokens": 225}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadDecathlonDatalist(unittest.TestCase):\n\n def test_cls_values(self):\n with tempfile.TemporaryDirectory() as tempdir:\n test_data = {\n \"name\": \"ChestXRay\",\n \"description\": \"Chest X-ray classification\",\n \"labels\": {\"0\": \"background\", \"1\": \"chest\"},\n \"training\": [{\"image\": \"chest_19.nii.gz\", \"label\": 0}, {\"image\": \"chest_31.nii.gz\", \"label\": 1}],\n \"test\": [\"chest_15.nii.gz\", \"chest_23.nii.gz\"],\n }\n json_str = json.dumps(test_data)\n file_path = os.path.join(tempdir, \"test_data.json\")\n 
with open(file_path, \"w\") as json_file:\n json_file.write(json_str)\n result = load_decathlon_datalist(file_path, False, \"training\", tempdir)\n self.assertEqual(result[0][\"image\"], os.path.join(tempdir, \"chest_19.nii.gz\"))\n self.assertEqual(result[0][\"label\"], 0)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_decathlon_datalist.py_TestLoadDecathlonDatalist.test_seg_no_basedir_TestLoadDecathlonDatalist.test_seg_no_basedir.with_tempfile_TemporaryDi.None_1": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_decathlon_datalist.py_TestLoadDecathlonDatalist.test_seg_no_basedir_TestLoadDecathlonDatalist.test_seg_no_basedir.with_tempfile_TemporaryDi.None_1", "embedding": null, "metadata": {"file_path": "tests/test_load_decathlon_datalist.py", "file_name": "test_load_decathlon_datalist.py", "file_type": "text/x-python", "category": "test", "start_line": 58, "end_line": 82, "span_ids": ["TestLoadDecathlonDatalist.test_seg_no_basedir"], "tokens": 314}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadDecathlonDatalist(unittest.TestCase):\n\n def test_seg_no_basedir(self):\n with tempfile.TemporaryDirectory() as tempdir:\n test_data = {\n \"name\": \"Spleen\",\n \"description\": \"Spleen Segmentation\",\n \"labels\": {\"0\": \"background\", \"1\": \"spleen\"},\n \"training\": [\n {\n \"image\": os.path.join(tempdir, \"spleen_19.nii.gz\"),\n \"label\": os.path.join(tempdir, \"spleen_19.nii.gz\"),\n },\n {\n \"image\": os.path.join(tempdir, \"spleen_31.nii.gz\"),\n \"label\": os.path.join(tempdir, \"spleen_31.nii.gz\"),\n },\n ],\n \"test\": [os.path.join(tempdir, \"spleen_15.nii.gz\"), os.path.join(tempdir, \"spleen_23.nii.gz\")],\n }\n json_str = json.dumps(test_data)\n file_path = os.path.join(tempdir, \"test_data.json\")\n with open(file_path, \"w\") as json_file:\n json_file.write(json_str)\n result = load_decathlon_datalist(file_path, True, \"training\", None)\n self.assertEqual(result[0][\"image\"], os.path.join(tempdir, \"spleen_19.nii.gz\"))\n self.assertEqual(result[0][\"label\"], os.path.join(tempdir, \"spleen_19.nii.gz\"))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage_TestLoadImage.test_nibabel_reader.with_tempfile_TemporaryDi.self_assertTupleEqual_res": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage_TestLoadImage.test_nibabel_reader.with_tempfile_TemporaryDi.self_assertTupleEqual_res", "embedding": null, "metadata": {"file_path": "tests/test_load_image.py", "file_name": "test_load_image.py", "file_type": "text/x-python", "category": "test", "start_line": 78, 
"end_line": 94, "span_ids": ["TestLoadImage", "TestLoadImage.test_nibabel_reader"], "tokens": 227}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadImage(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5])\n def test_nibabel_reader(self, input_param, filenames, expected_shape):\n test_image = np.random.rand(128, 128, 128)\n with tempfile.TemporaryDirectory() as tempdir:\n for i, name in enumerate(filenames):\n filenames[i] = os.path.join(tempdir, name)\n nib.save(nib.Nifti1Image(test_image, np.eye(4)), filenames[i])\n result = LoadImage(**input_param)(filenames)\n\n if isinstance(result, tuple):\n result, header = result\n self.assertTrue(\"affine\" in header)\n self.assertEqual(header[\"filename_or_obj\"], os.path.join(tempdir, \"test_image.nii.gz\"))\n np.testing.assert_allclose(header[\"affine\"], np.eye(4))\n np.testing.assert_allclose(header[\"original_affine\"], np.eye(4))\n self.assertTupleEqual(result.shape, expected_shape)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_itk_reader_TestLoadImage.test_itk_reader.with_tempfile_TemporaryDi.self_assertTupleEqual_res": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_itk_reader_TestLoadImage.test_itk_reader.with_tempfile_TemporaryDi.self_assertTupleEqual_res", "embedding": null, "metadata": {"file_path": "tests/test_load_image.py", "file_name": "test_load_image.py", "file_type": "text/x-python", "category": "test", "start_line": 96, "end_line": 112, "span_ids": ["TestLoadImage.test_itk_reader"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadImage(unittest.TestCase):\n\n @parameterized.expand([TEST_CASE_6, TEST_CASE_7, TEST_CASE_8, TEST_CASE_9])\n def test_itk_reader(self, input_param, filenames, expected_shape):\n test_image = np.random.rand(128, 128, 128)\n with tempfile.TemporaryDirectory() as tempdir:\n for i, name in enumerate(filenames):\n filenames[i] = os.path.join(tempdir, name)\n itk_np_view = itk.image_view_from_array(test_image)\n itk.imwrite(itk_np_view, filenames[i])\n result = LoadImage(**input_param)(filenames)\n\n if isinstance(result, tuple):\n result, header = result\n self.assertTrue(\"affine\" in header)\n self.assertEqual(header[\"filename_or_obj\"], os.path.join(tempdir, \"test_image.nii.gz\"))\n np.testing.assert_allclose(header[\"affine\"], np.eye(4))\n np.testing.assert_allclose(header[\"original_affine\"], np.eye(4))\n self.assertTupleEqual(result.shape, expected_shape)", "start_char_idx": null, "end_char_idx": null, "text_template": 
"{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_itk_dicom_series_reader_TestLoadImage.test_itk_dicom_series_reader.self_assertTupleEqual_tup": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_itk_dicom_series_reader_TestLoadImage.test_itk_dicom_series_reader.self_assertTupleEqual_tup", "embedding": null, "metadata": {"file_path": "tests/test_load_image.py", "file_name": "test_load_image.py", "file_type": "text/x-python", "category": "test", "start_line": 112, "end_line": 129, "span_ids": ["TestLoadImage.test_itk_dicom_series_reader"], "tokens": 218}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadImage(unittest.TestCase):\n\n @parameterized.expand([TEST_CASE_10, TEST_CASE_11])\n def test_itk_dicom_series_reader(self, input_param, filenames, expected_shape):\n result, header = LoadImage(**input_param)(filenames)\n self.assertTrue(\"affine\" in header)\n self.assertEqual(header[\"filename_or_obj\"], filenames)\n np.testing.assert_allclose(\n header[\"affine\"],\n np.array(\n [\n [0.488281, 0.0, 0.0, -125.0],\n [0.0, 0.488281, 0.0, -128.100006],\n [0.0, 0.0, 68.33333333, -99.480003],\n [0.0, 0.0, 0.0, 1.0],\n ]\n ),\n )\n self.assertTupleEqual(result.shape, expected_shape)\n self.assertTupleEqual(tuple(header[\"spatial_shape\"]), expected_shape)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_register_TestLoadImage.test_register.with_tempfile_TemporaryDi.self_assertTupleEqual_res": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_register_TestLoadImage.test_register.with_tempfile_TemporaryDi.self_assertTupleEqual_res", "embedding": null, "metadata": {"file_path": "tests/test_load_image.py", "file_name": "test_load_image.py", "file_type": "text/x-python", "category": "test", "start_line": 146, "end_line": 158, "span_ids": ["TestLoadImage.test_register"], "tokens": 137}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadImage(unittest.TestCase):\n\n def test_register(self):\n spatial_size = (32, 64, 128)\n test_image = np.random.rand(*spatial_size)\n with tempfile.TemporaryDirectory() as tempdir:\n filename = os.path.join(tempdir, \"test_image.nii.gz\")\n itk_np_view = itk.image_view_from_array(test_image)\n itk.imwrite(itk_np_view, filename)\n\n loader = LoadImage(image_only=False)\n 
loader.register(ITKReader())\n result, header = loader(filename)\n self.assertTupleEqual(tuple(header[\"spatial_shape\"]), spatial_size)\n self.assertTupleEqual(result.shape, spatial_size)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_kwargs_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_image.py_TestLoadImage.test_kwargs_", "embedding": null, "metadata": {"file_path": "tests/test_load_image.py", "file_name": "test_load_image.py", "file_type": "text/x-python", "category": "test", "start_line": 169, "end_line": 192, "span_ids": ["TestLoadImage.test_kwargs", "impl:23"], "tokens": 206}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadImage(unittest.TestCase):\n\n def test_kwargs(self):\n spatial_size = (32, 64, 128)\n expected_shape = (128, 64, 32)\n test_image = np.random.rand(*spatial_size)\n with tempfile.TemporaryDirectory() as tempdir:\n filename = os.path.join(tempdir, \"test_image.nii.gz\")\n itk_np_view = itk.image_view_from_array(test_image)\n itk.imwrite(itk_np_view, filename)\n\n loader = LoadImage(image_only=False)\n reader = ITKReader(fallback_only=False)\n loader.register(reader)\n result, header = loader(filename)\n\n reader = ITKReader()\n img = reader.read(filename, fallback_only=False)\n result_raw, header_raw = reader.get_data(img)\n np.testing.assert_allclose(header[\"spatial_shape\"], header_raw[\"spatial_shape\"])\n self.assertTupleEqual(result.shape, result_raw.shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_imaged.py_TestLoadImaged.test_register_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_load_imaged.py_TestLoadImaged.test_register_", "embedding": null, "metadata": {"file_path": "tests/test_load_imaged.py", "file_name": "test_load_imaged.py", "file_type": "text/x-python", "category": "test", "start_line": 45, "end_line": 62, "span_ids": ["impl:7", "TestLoadImaged.test_register"], "tokens": 159}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestLoadImaged(unittest.TestCase):\n\n def test_register(self):\n spatial_size = (32, 64, 128)\n test_image = np.random.rand(*spatial_size)\n with tempfile.TemporaryDirectory() as tempdir:\n filename = os.path.join(tempdir, \"test_image.nii.gz\")\n itk_np_view = 
itk.image_view_from_array(test_image)\n itk.imwrite(itk_np_view, filename)\n\n loader = LoadImaged(keys=\"img\")\n loader.register(ITKReader())\n result = loader({\"img\": filename})\n self.assertTupleEqual(tuple(result[\"img_meta_dict\"][\"spatial_shape\"]), spatial_size)\n self.assertTupleEqual(result[\"img\"].shape, spatial_size)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_map_binary_to_indices.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_map_binary_to_indices.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_map_binary_to_indices.py", "file_name": "test_map_binary_to_indices.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 66, "span_ids": ["impl:9", "TestMapBinaryToIndices.test_type_shape", "TestMapBinaryToIndices", "docstring"], "tokens": 580}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import map_binary_to_indices\n\nTEST_CASE_1 = [\n {\"label\": np.array([[[0, 1, 1], [1, 0, 1], [1, 1, 0]]]), \"image\": None, \"image_threshold\": 0.0},\n np.array([1, 2, 3, 5, 6, 7]),\n np.array([0, 4, 8]),\n]\n\nTEST_CASE_2 = [\n {\n \"label\": np.array([[[0, 1, 1], [1, 0, 1], [1, 1, 0]]]),\n \"image\": np.array([[[1, 1, 1], [1, 0, 1], [1, 1, 1]]]),\n \"image_threshold\": 0.0,\n },\n np.array([1, 2, 3, 5, 6, 7]),\n np.array([0, 8]),\n]\n\nTEST_CASE_3 = [\n {\n \"label\": np.array([[[0, 1, 1], [1, 0, 1], [1, 1, 0]]]),\n \"image\": np.array([[[3, 3, 3], [3, 1, 3], [3, 3, 3]]]),\n \"image_threshold\": 1.0,\n },\n np.array([1, 2, 3, 5, 6, 7]),\n np.array([0, 8]),\n]\n\nTEST_CASE_4 = [\n {\n \"label\": np.array([[[0, 1, 2], [3, 0, 4], [5, 6, 0]]]),\n \"image\": np.array([[[3, 3, 3], [3, 1, 3], [3, 3, 3]]]),\n \"image_threshold\": 1.0,\n },\n np.array([1, 2, 3, 5, 6, 7]),\n np.array([0, 8]),\n]\n\n\nclass TestMapBinaryToIndices(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4])\n def test_type_shape(self, input_data, expected_fg, expected_bg):\n fg_indices, bg_indices = map_binary_to_indices(**input_data)\n np.testing.assert_allclose(fg_indices, expected_fg)\n np.testing.assert_allclose(bg_indices, expected_bg)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensity.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensity.py_unittest_TEST_CASES._", "embedding": null, "metadata": 
{"file_path": "tests/test_normalize_intensity.py", "file_name": "test_normalize_intensity.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 55, "span_ids": ["docstring"], "tokens": 814}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import NormalizeIntensity\nfrom tests.utils import NumpyImageTestCase2D\n\nTEST_CASES = [\n [{\"nonzero\": True}, np.array([0.0, 3.0, 0.0, 4.0]), np.array([0.0, -1.0, 0.0, 1.0])],\n [\n {\"subtrahend\": np.array([3.5, 3.5, 3.5, 3.5]), \"divisor\": np.array([0.5, 0.5, 0.5, 0.5]), \"nonzero\": True},\n np.array([0.0, 3.0, 0.0, 4.0]),\n np.array([0.0, -1.0, 0.0, 1.0]),\n ],\n [{\"nonzero\": True}, np.array([0.0, 0.0, 0.0, 0.0]), np.array([0.0, 0.0, 0.0, 0.0])],\n [{\"nonzero\": False}, np.array([0.0, 0.0, 0.0, 0.0]), np.array([0.0, 0.0, 0.0, 0.0])],\n [{\"nonzero\": False}, np.array([1, 1, 1, 1]), np.array([0.0, 0.0, 0.0, 0.0])],\n [\n {\"nonzero\": False, \"channel_wise\": True, \"subtrahend\": [1, 2, 3]},\n np.ones((3, 2, 2)),\n np.array([[[0.0, 0.0], [0.0, 0.0]], [[-1.0, -1.0], [-1.0, -1.0]], [[-2.0, -2.0], [-2.0, -2.0]]]),\n ],\n [\n {\"nonzero\": True, \"channel_wise\": True, \"subtrahend\": [1, 2, 3], \"divisor\": [0, 0, 2]},\n np.ones((3, 2, 2)),\n np.array([[[0.0, 0.0], [0.0, 0.0]], [[-1.0, -1.0], [-1.0, -1.0]], [[-1.0, -1.0], [-1.0, -1.0]]]),\n ],\n [\n {\"nonzero\": True, \"channel_wise\": False, \"subtrahend\": 2, \"divisor\": 0},\n np.ones((3, 2, 2)),\n np.ones((3, 2, 2)) * -1.0,\n ],\n [\n {\"nonzero\": True, \"channel_wise\": False, \"subtrahend\": np.ones((3, 2, 2)) * 0.5, \"divisor\": 0},\n np.ones((3, 2, 2)),\n np.ones((3, 2, 2)) * 0.5,\n ],\n [\n {\"nonzero\": True, \"channel_wise\": True, \"subtrahend\": np.ones((3, 2, 2)) * 0.5, \"divisor\": [0, 1, 0]},\n np.ones((3, 2, 2)),\n np.ones((3, 2, 2)) * 0.5,\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensity.py_TestNormalizeIntensity.test_channel_wise_TestNormalizeIntensity.test_channel_wise.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensity.py_TestNormalizeIntensity.test_channel_wise_TestNormalizeIntensity.test_channel_wise.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_normalize_intensity.py", "file_name": "test_normalize_intensity.py", "file_type": "text/x-python", "category": "test", "start_line": 70, "end_line": 74, "span_ids": ["TestNormalizeIntensity.test_channel_wise"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
TestNormalizeIntensity(NumpyImageTestCase2D):\n\n def test_channel_wise(self):\n normalizer = NormalizeIntensity(nonzero=True, channel_wise=True)\n input_data = np.array([[0.0, 3.0, 0.0, 4.0], [0.0, 4.0, 0.0, 5.0]])\n expected = np.array([[0.0, -1.0, 0.0, 1.0], [0.0, -1.0, 0.0, 1.0]])\n np.testing.assert_allclose(expected, normalizer(input_data))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensity.py_TestNormalizeIntensity.test_value_errors_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_normalize_intensity.py_TestNormalizeIntensity.test_value_errors_", "embedding": null, "metadata": {"file_path": "tests/test_normalize_intensity.py", "file_name": "test_normalize_intensity.py", "file_type": "text/x-python", "category": "test", "start_line": 76, "end_line": 88, "span_ids": ["TestNormalizeIntensity.test_value_errors", "impl:3"], "tokens": 151}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNormalizeIntensity(NumpyImageTestCase2D):\n\n def test_value_errors(self):\n input_data = np.array([[0.0, 3.0, 0.0, 4.0], [0.0, 4.0, 0.0, 5.0]])\n normalizer = NormalizeIntensity(nonzero=True, channel_wise=True, subtrahend=[1])\n with self.assertRaises(ValueError):\n normalizer(input_data)\n normalizer = NormalizeIntensity(nonzero=True, channel_wise=True, subtrahend=[1, 2], divisor=[1])\n with self.assertRaises(ValueError):\n normalizer(input_data)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_os_TestNumpyReader.test_npy.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_os_TestNumpyReader.test_npy.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_numpy_reader.py", "file_name": "test_numpy_reader.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 32, "span_ids": ["TestNumpyReader", "TestNumpyReader.test_npy", "docstring"], "tokens": 156}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport tempfile\nimport unittest\n\nimport numpy as np\n\nfrom monai.data import NumpyReader\n\n\nclass TestNumpyReader(unittest.TestCase):\n def test_npy(self):\n test_data = np.random.randint(0, 256, size=[3, 4, 4])\n with tempfile.TemporaryDirectory() as tempdir:\n filepath = 
os.path.join(tempdir, \"test_data.npy\")\n np.save(filepath, test_data)\n\n reader = NumpyReader()\n result = reader.get_data(reader.read(filepath))\n self.assertTupleEqual(result[1][\"spatial_shape\"], test_data.shape)\n self.assertTupleEqual(result[0].shape, test_data.shape)\n np.testing.assert_allclose(result[0], test_data)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_npz1_TestNumpyReader.test_npz1.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_npz1_TestNumpyReader.test_npz1.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_numpy_reader.py", "file_name": "test_numpy_reader.py", "file_type": "text/x-python", "category": "test", "start_line": 34, "end_line": 44, "span_ids": ["TestNumpyReader.test_npz1"], "tokens": 139}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNumpyReader(unittest.TestCase):\n\n def test_npz1(self):\n test_data1 = np.random.randint(0, 256, size=[3, 4, 4])\n with tempfile.TemporaryDirectory() as tempdir:\n filepath = os.path.join(tempdir, \"test_data.npy\")\n np.save(filepath, test_data1)\n\n reader = NumpyReader()\n result = reader.get_data(reader.read(filepath))\n self.assertTupleEqual(result[1][\"spatial_shape\"], test_data1.shape)\n self.assertTupleEqual(result[0].shape, test_data1.shape)\n np.testing.assert_allclose(result[0], test_data1)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_npz2_TestNumpyReader.test_npz2.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_npz2_TestNumpyReader.test_npz2.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_numpy_reader.py", "file_name": "test_numpy_reader.py", "file_type": "text/x-python", "category": "test", "start_line": 46, "end_line": 57, "span_ids": ["TestNumpyReader.test_npz2"], "tokens": 183}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNumpyReader(unittest.TestCase):\n\n def test_npz2(self):\n test_data1 = np.random.randint(0, 256, size=[3, 4, 4])\n test_data2 = np.random.randint(0, 256, size=[3, 4, 4])\n with tempfile.TemporaryDirectory() as tempdir:\n filepath = os.path.join(tempdir, 
\"test_data.npz\")\n np.savez(filepath, test_data1, test_data2)\n\n reader = NumpyReader()\n result = reader.get_data(reader.read(filepath))\n self.assertTupleEqual(result[1][\"spatial_shape\"], test_data1.shape)\n self.assertTupleEqual(result[0].shape, (2, 3, 4, 4))\n np.testing.assert_allclose(result[0], np.stack([test_data1, test_data2]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_npz3_TestNumpyReader.test_npz3.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_npz3_TestNumpyReader.test_npz3.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_numpy_reader.py", "file_name": "test_numpy_reader.py", "file_type": "text/x-python", "category": "test", "start_line": 59, "end_line": 70, "span_ids": ["TestNumpyReader.test_npz3"], "tokens": 197}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNumpyReader(unittest.TestCase):\n\n def test_npz3(self):\n test_data1 = np.random.randint(0, 256, size=[3, 4, 4])\n test_data2 = np.random.randint(0, 256, size=[3, 4, 4])\n with tempfile.TemporaryDirectory() as tempdir:\n filepath = os.path.join(tempdir, \"test_data.npz\")\n np.savez(filepath, test1=test_data1, test2=test_data2)\n\n reader = NumpyReader(npz_keys=[\"test1\", \"test2\"])\n result = reader.get_data(reader.read(filepath))\n self.assertTupleEqual(result[1][\"spatial_shape\"], test_data1.shape)\n self.assertTupleEqual(result[0].shape, (2, 3, 4, 4))\n np.testing.assert_allclose(result[0], np.stack([test_data1, test_data2]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_npy_pickle_TestNumpyReader.test_npy_pickle.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_npy_pickle_TestNumpyReader.test_npy_pickle.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_numpy_reader.py", "file_name": "test_numpy_reader.py", "file_type": "text/x-python", "category": "test", "start_line": 72, "end_line": 81, "span_ids": ["TestNumpyReader.test_npy_pickle"], "tokens": 134}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNumpyReader(unittest.TestCase):\n\n def test_npy_pickle(self):\n test_data = {\"test\": 
np.random.randint(0, 256, size=[3, 4, 4])}\n with tempfile.TemporaryDirectory() as tempdir:\n filepath = os.path.join(tempdir, \"test_data.npy\")\n np.save(filepath, test_data, allow_pickle=True)\n\n reader = NumpyReader()\n result = reader.get_data(reader.read(filepath))[0].item()\n self.assertTupleEqual(result[\"test\"].shape, test_data[\"test\"].shape)\n np.testing.assert_allclose(result[\"test\"], test_data[\"test\"])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_kwargs_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_numpy_reader.py_TestNumpyReader.test_kwargs_", "embedding": null, "metadata": {"file_path": "tests/test_numpy_reader.py", "file_name": "test_numpy_reader.py", "file_type": "text/x-python", "category": "test", "start_line": 83, "end_line": 96, "span_ids": ["TestNumpyReader.test_kwargs", "impl"], "tokens": 138}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNumpyReader(unittest.TestCase):\n\n def test_kwargs(self):\n test_data = {\"test\": np.random.randint(0, 256, size=[3, 4, 4])}\n with tempfile.TemporaryDirectory() as tempdir:\n filepath = os.path.join(tempdir, \"test_data.npy\")\n np.save(filepath, test_data, allow_pickle=True)\n\n reader = NumpyReader(mmap_mode=\"r\")\n result = reader.get_data(reader.read(filepath, mmap_mode=None))[0].item()\n self.assertTupleEqual(result[\"test\"].shape, test_data[\"test\"].shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optim_novograd.py_unittest_build_test_cases.return.test_cases": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optim_novograd.py_unittest_build_test_cases.return.test_cases", "embedding": null, "metadata": {"file_path": "tests/test_optim_novograd.py", "file_name": "test_optim_novograd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 38, "span_ids": ["build_test_cases", "docstring"], "tokens": 224}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\nfrom torch.autograd import Variable\n\nfrom monai.optimizers import Novograd\n\n\ndef build_test_cases(data):\n [weight, bias, input] = data\n weight = Variable(weight, requires_grad=True)\n bias = Variable(bias, requires_grad=True)\n 
input = Variable(input)\n\n default_params = {\"lr\": 1e-3, \"amsgrad\": False, \"grad_averaging\": False, \"weight_decay\": 0}\n\n test_case_same_param = [{\"params\": [weight, bias]}]\n test_case_diff_param = [\n {\"params\": [weight]},\n {\"params\": [bias], \"lr\": 1e-2, \"amsgrad\": True, \"grad_averaging\": True, \"weight_decay\": 0.1},\n ]\n\n test_cases = []\n test_cases.append([test_case_same_param, default_params, weight, bias, input])\n test_cases.append([test_case_diff_param, default_params, weight, bias, input])\n return test_cases", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optim_novograd.py_TEST_CASES_ALL_if_torch_cuda_device_coun.TEST_CASES_ALL_build_t": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optim_novograd.py_TEST_CASES_ALL_if_torch_cuda_device_coun.TEST_CASES_ALL_build_t", "embedding": null, "metadata": {"file_path": "tests/test_optim_novograd.py", "file_name": "test_optim_novograd.py", "file_type": "text/x-python", "category": "test", "start_line": 41, "end_line": 72, "span_ids": ["impl"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASES_ALL = build_test_cases( # normal parameters\n [\n torch.randn(10, 5),\n torch.randn(10),\n torch.randn(5),\n ]\n)\n\nTEST_CASES_ALL += build_test_cases( # non-contiguous parameters\n [\n torch.randn(10, 5, 2)[..., 0],\n torch.randn(10, 2)[..., 0],\n torch.randn(5),\n ]\n)\n\nif torch.cuda.is_available():\n TEST_CASES_ALL += build_test_cases( # gpu parameters\n [\n torch.randn(10, 5).cuda(),\n torch.randn(10).cuda(),\n torch.randn(5).cuda(),\n ]\n )\nif torch.cuda.device_count() > 1:\n TEST_CASES_ALL += build_test_cases( # multi-gpu parameters\n [\n torch.randn(10, 5).cuda(0),\n torch.randn(10).cuda(1),\n torch.randn(5).cuda(0),\n ]\n )", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optim_novograd.py_TestNovograd_TestNovograd.test_step.self_assertLess_fn_item": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optim_novograd.py_TestNovograd_TestNovograd.test_step.self_assertLess_fn_item", "embedding": null, "metadata": {"file_path": "tests/test_optim_novograd.py", "file_name": "test_optim_novograd.py", "file_type": "text/x-python", "category": "test", "start_line": 75, "end_line": 97, "span_ids": ["TestNovograd.test_step", "TestNovograd"], "tokens": 187}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], 
"relationships": {}, "text": "class TestNovograd(unittest.TestCase):\n \"\"\"\n This class takes `Pytorch's test_optim function\n `_ for reference.\n \"\"\"\n\n @parameterized.expand(TEST_CASES_ALL)\n def test_step(self, specify_param, default_param, weight, bias, input):\n optimizer = Novograd(specify_param, **default_param)\n\n def fn():\n optimizer.zero_grad()\n y = weight.mv(input)\n if y.is_cuda and bias.is_cuda and y.get_device() != bias.get_device():\n y = y.cuda(bias.get_device())\n loss = (y + bias).pow(2).sum()\n loss.backward()\n return loss\n\n initial_value = fn().item()\n for _ in range(100):\n optimizer.step(fn)\n self.assertLess(fn().item(), initial_value)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optim_novograd.py_TestNovograd.test_ill_arg_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_optim_novograd.py_TestNovograd.test_ill_arg_", "embedding": null, "metadata": {"file_path": "tests/test_optim_novograd.py", "file_name": "test_optim_novograd.py", "file_type": "text/x-python", "category": "test", "start_line": 99, "end_line": 115, "span_ids": ["TestNovograd.test_ill_arg", "impl:8"], "tokens": 191}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestNovograd(unittest.TestCase):\n\n def test_ill_arg(self):\n param = {\"params\": [Variable(torch.randn(10), requires_grad=True)]}\n with self.assertRaisesRegex(ValueError, \"Invalid learning rate: -1\"):\n Novograd(param, lr=-1)\n with self.assertRaisesRegex(ValueError, \"Invalid epsilon value: -1\"):\n Novograd(param, eps=-1)\n with self.assertRaisesRegex(ValueError, \"Invalid beta parameter at index 0: 1.0\"):\n Novograd(param, betas=(1.0, 0.98))\n with self.assertRaisesRegex(ValueError, \"Invalid beta parameter at index 1: -1\"):\n Novograd(param, betas=(0.9, -1))\n with self.assertRaisesRegex(ValueError, \"Invalid weight_decay value: -1\"):\n Novograd(param, weight_decay=-1)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_parallel_execution.py_unittest_TestParallelExecution.test_single_gpu.trainer_run_fake_data_str": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_parallel_execution.py_unittest_TestParallelExecution.test_single_gpu.trainer_run_fake_data_str", "embedding": null, "metadata": {"file_path": "tests/test_parallel_execution.py", "file_name": "test_parallel_execution.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 41, "span_ids": ["TestParallelExecution.test_single_gpu", "fake_loss", "TestParallelExecution", "docstring", "fake_data_stream"], "tokens": 219}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\nimport warnings\n\nimport torch\n\nfrom monai.engines import create_multigpu_supervised_trainer\nfrom tests.utils import skip_if_no_cuda\n\n\ndef fake_loss(y_pred, y):\n return (y_pred[0] + y).sum()\n\n\ndef fake_data_stream():\n while True:\n yield torch.rand((10, 1, 64, 64)), torch.rand((10, 1, 64, 64))\n\n\nclass TestParallelExecution(unittest.TestCase):\n \"\"\"\n Tests single GPU, multi GPU, and CPU execution with the Ignite supervised trainer.\n \"\"\"\n\n @skip_if_no_cuda\n def test_single_gpu(self):\n device = torch.device(\"cuda:0\")\n net = torch.nn.Conv2d(1, 1, 3, padding=1).to(device)\n opt = torch.optim.Adam(net.parameters(), 1e-3)\n trainer = create_multigpu_supervised_trainer(net, opt, fake_loss, [device])\n trainer.run(fake_data_stream(), 2, 2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_parallel_execution.py_TestParallelExecution.test_multi_gpu_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_parallel_execution.py_TestParallelExecution.test_multi_gpu_", "embedding": null, "metadata": {"file_path": "tests/test_parallel_execution.py", "file_name": "test_parallel_execution.py", "file_type": "text/x-python", "category": "test", "start_line": 43, "end_line": 65, "span_ids": ["TestParallelExecution.test_multi_gpu", "TestParallelExecution.test_cpu", "impl"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestParallelExecution(unittest.TestCase):\n\n @skip_if_no_cuda\n def test_multi_gpu(self):\n device = torch.device(\"cuda\")\n net = torch.nn.Conv2d(1, 1, 3, padding=1).to(device)\n opt = torch.optim.Adam(net.parameters(), 1e-3)\n\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\") # ignore warnings about imbalanced GPU memory\n\n trainer = create_multigpu_supervised_trainer(net, opt, fake_loss, None)\n\n trainer.run(fake_data_stream(), 2, 2)\n\n def test_cpu(self):\n net = torch.nn.Conv2d(1, 1, 3, padding=1)\n opt = torch.optim.Adam(net.parameters(), 1e-3)\n trainer = create_multigpu_supervised_trainer(net, opt, fake_loss, [])\n trainer.run(fake_data_stream(), 2, 2)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_patch_dataset.py_sys_TestPatchDataset.test_shape.self_assertEqual_output_": {"__data__": {"id_": 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_patch_dataset.py_sys_TestPatchDataset.test_shape.self_assertEqual_output_", "embedding": null, "metadata": {"file_path": "tests/test_patch_dataset.py", "file_name": "test_patch_dataset.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 40, "span_ids": ["TestPatchDataset", "TestPatchDataset.test_shape", "identity", "docstring"], "tokens": 203}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import sys\nimport unittest\n\nimport numpy as np\n\nfrom monai.data import DataLoader, Dataset, PatchDataset\nfrom monai.transforms import RandShiftIntensity, RandSpatialCropSamples\nfrom monai.utils import set_determinism\n\n\ndef identity(x):\n # simple transform that returns the input itself\n return x\n\n\nclass TestPatchDataset(unittest.TestCase):\n def test_shape(self):\n test_dataset = [\"vwxyz\", \"hello\", \"world\"]\n n_per_image = len(test_dataset[0])\n\n result = PatchDataset(dataset=test_dataset, patch_func=identity, samples_per_image=n_per_image)\n\n output = []\n n_workers = 0 if sys.platform == \"win32\" else 2\n for item in DataLoader(result, batch_size=3, num_workers=n_workers):\n print(item)\n output.append(\"\".join(item))\n expected = [\"vwx\", \"yzh\", \"ell\", \"owo\", \"rld\"]\n self.assertEqual(output, expected)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_patch_dataset.py_TestPatchDataset.test_loading_array_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_patch_dataset.py_TestPatchDataset.test_loading_array_", "embedding": null, "metadata": {"file_path": "tests/test_patch_dataset.py", "file_name": "test_patch_dataset.py", "file_type": "text/x-python", "category": "test", "start_line": 42, "end_line": 88, "span_ids": ["TestPatchDataset.test_loading_array", "impl"], "tokens": 500}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPatchDataset(unittest.TestCase):\n\n def test_loading_array(self):\n set_determinism(seed=1234)\n # image dataset\n images = [np.arange(16, dtype=float).reshape(1, 4, 4), np.arange(16, dtype=float).reshape(1, 4, 4)]\n # image patch sampler\n n_samples = 8\n sampler = RandSpatialCropSamples(roi_size=(3, 3), num_samples=n_samples, random_center=True, random_size=False)\n\n # image level\n patch_intensity = RandShiftIntensity(offsets=1.0, prob=1.0)\n image_ds = Dataset(images, transform=patch_intensity)\n # patch level\n ds = PatchDataset(dataset=image_ds, patch_func=sampler, samples_per_image=n_samples, transform=patch_intensity)\n\n np.testing.assert_equal(len(ds), n_samples * len(images))\n # use the patch dataset, 
length: len(images) x samplers_per_image\n for item in DataLoader(ds, batch_size=2, shuffle=False, num_workers=0):\n np.testing.assert_equal(tuple(item.shape), (2, 1, 3, 3))\n np.testing.assert_allclose(\n item[0],\n np.array(\n [[[1.779992, 2.779992, 3.779992], [5.779992, 6.779992, 7.779992], [9.779992, 10.779992, 11.779992]]]\n ),\n rtol=1e-5,\n )\n if sys.platform != \"win32\":\n for item in DataLoader(ds, batch_size=2, shuffle=False, num_workers=2):\n np.testing.assert_equal(tuple(item.shape), (2, 1, 3, 3))\n np.testing.assert_allclose(\n item[0],\n np.array(\n [\n [\n [5.025618, 6.025618, 7.025618],\n [9.025618, 10.025618, 11.025618],\n [13.025618, 14.025618, 15.025618],\n ]\n ]\n ),\n rtol=1e-5,\n )\n set_determinism(seed=None)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pil_reader.py_os_TEST_CASE_7._128_128_3_test_im": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pil_reader.py_os_TEST_CASE_7._128_128_3_test_im", "embedding": null, "metadata": {"file_path": "tests/test_pil_reader.py", "file_name": "test_pil_reader.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 34, "span_ids": ["docstring"], "tokens": 276}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport tempfile\nimport unittest\n\nimport numpy as np\nfrom parameterized import parameterized\nfrom PIL import Image\n\nfrom monai.data import PILReader\n\nTEST_CASE_1 = [(128, 128), [\"test_image.png\"], (128, 128), (128, 128)]\n\nTEST_CASE_2 = [(128, 128, 3), [\"test_image.png\"], (128, 128, 3), (128, 128)]\n\nTEST_CASE_3 = [(128, 128, 4), [\"test_image.png\"], (128, 128, 4), (128, 128)]\n\nTEST_CASE_4 = [(128, 128), [\"test_image1.png\", \"test_image2.png\", \"test_image3.png\"], (3, 128, 128), (128, 128)]\n\nTEST_CASE_5 = [(128, 128, 3), [\"test_image.jpg\"], (128, 128, 3), (128, 128)]\n\nTEST_CASE_6 = [(128, 128, 3), [\"test_image.bmp\"], (128, 128, 3), (128, 128)]\n\nTEST_CASE_7 = [(128, 128, 3), [\"test_image.png\"], (128, 128, 2), (128, 128)]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pil_reader.py_TestPNGReader_TestPNGReader.test_shape_value.if_result_0_shape_tes.else_.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pil_reader.py_TestPNGReader_TestPNGReader.test_shape_value.if_result_0_shape_tes.else_.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_pil_reader.py", "file_name": "test_pil_reader.py", "file_type": "text/x-python", "category": "test", "start_line": 37, "end_line": 55, "span_ids": ["TestPNGReader", 
"TestPNGReader.test_shape_value"], "tokens": 261}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPNGReader(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4, TEST_CASE_5, TEST_CASE_6])\n def test_shape_value(self, data_shape, filenames, expected_shape, meta_shape):\n test_image = np.random.randint(0, 256, size=data_shape)\n with tempfile.TemporaryDirectory() as tempdir:\n for i, name in enumerate(filenames):\n filenames[i] = os.path.join(tempdir, name)\n Image.fromarray(test_image.astype(\"uint8\")).save(filenames[i])\n reader = PILReader(mode=\"r\")\n result = reader.get_data(reader.read(filenames))\n # load image by PIL and compare the result\n test_image = np.asarray(Image.open(filenames[0]))\n\n self.assertTupleEqual(tuple(result[1][\"spatial_shape\"]), meta_shape)\n self.assertTupleEqual(result[0].shape, expected_shape)\n if result[0].shape == test_image.shape:\n np.testing.assert_allclose(result[0], test_image)\n else:\n np.testing.assert_allclose(result[0], np.tile(test_image, [result[0].shape[0], 1, 1]))", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pil_reader.py_TestPNGReader.test_converter_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_pil_reader.py_TestPNGReader.test_converter_", "embedding": null, "metadata": {"file_path": "tests/test_pil_reader.py", "file_name": "test_pil_reader.py", "file_type": "text/x-python", "category": "test", "start_line": 57, "end_line": 76, "span_ids": ["TestPNGReader.test_converter", "impl:15"], "tokens": 216}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPNGReader(unittest.TestCase):\n\n @parameterized.expand([TEST_CASE_7])\n def test_converter(self, data_shape, filenames, expected_shape, meta_shape):\n test_image = np.random.randint(0, 256, size=data_shape)\n with tempfile.TemporaryDirectory() as tempdir:\n for i, name in enumerate(filenames):\n filenames[i] = os.path.join(tempdir, name)\n Image.fromarray(test_image.astype(\"uint8\")).save(filenames[i])\n reader = PILReader(converter=lambda image: image.convert(\"LA\"))\n result = reader.get_data(reader.read(filenames, mode=\"r\"))\n # load image by PIL and compare the result\n test_image = np.asarray(Image.open(filenames[0]).convert(\"LA\"))\n\n self.assertTupleEqual(tuple(result[1][\"spatial_shape\"]), meta_shape)\n self.assertTupleEqual(result[0].shape, expected_shape)\n np.testing.assert_allclose(result[0], test_image)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", 
"class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_polyval.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_polyval.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_polyval.py", "file_name": "test_polyval.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 28, "span_ids": ["docstring"], "tokens": 220}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks.layers import polyval\n\nTEST_CASES = [\n [[1.0, 2.5, -4.2], 5.0, 33.3],\n [[2, 1, 0], 3.0, 21],\n [[2, 1, 0], [3.0, 3.0], [21, 21]],\n [torch.as_tensor([2, 1, 0]), [3.0, 3.0], [21, 21]],\n [torch.as_tensor([2, 1, 0]), torch.as_tensor([3.0, 3.0]), [21, 21]],\n [torch.as_tensor([2, 1, 0]), np.array([3.0, 3.0]), [21, 21]],\n [[], np.array([3.0, 3.0]), [0, 0]],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_polyval.py_TestPolyval_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_polyval.py_TestPolyval_", "embedding": null, "metadata": {"file_path": "tests/test_polyval.py", "file_name": "test_polyval.py", "file_type": "text/x-python", "category": "test", "start_line": 31, "end_line": 53, "span_ids": ["impl:3", "TestPolyval", "TestPolyval.test_floats", "TestPolyval.test_gpu"], "tokens": 211}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPolyval(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_floats(self, coef, x, expected):\n result = polyval(coef, x)\n np.testing.assert_allclose(result.cpu().numpy(), expected)\n\n @parameterized.expand(TEST_CASES)\n def test_gpu(self, coef, x, expected):\n device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n x = torch.as_tensor(x, dtype=torch.float, device=device)\n x.requires_grad = True\n coef = torch.as_tensor(coef, dtype=torch.float, device=device)\n coef.requires_grad = True\n result = polyval(coef, x)\n if coef.shape[0] > 0: # empty coef doesn't have grad\n result.mean().backward()\n np.testing.assert_allclose(coef.grad.shape, coef.shape)\n np.testing.assert_allclose(result.cpu().detach().numpy(), expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_smoothd.py_unittest_TEST_CASE_2._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_smoothd.py_unittest_TEST_CASE_2._", "embedding": null, "metadata": {"file_path": "tests/test_rand_gaussian_smoothd.py", "file_name": "test_rand_gaussian_smoothd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 39, "span_ids": ["docstring"], "tokens": 512}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandGaussianSmoothd\n\nTEST_CASE_1 = [\n {\"keys\": \"img\", \"sigma_x\": (0.5, 1.5), \"prob\": 1.0},\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [[0.71806467, 0.9074683, 0.71806467], [1.0718315, 1.3545481, 1.0718315], [1.0337002, 1.306359, 1.0337002]],\n [[2.0318885, 2.5678391, 2.0318885], [2.6795788, 3.3863702, 2.6795788], [2.3475242, 2.9667296, 2.3475242]],\n ]\n ),\n]\n\nTEST_CASE_2 = [\n {\"keys\": \"img\", \"sigma_x\": (0.5, 1.5), \"sigma_y\": (0.5, 1.0), \"prob\": 1.0},\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [[0.7686928, 0.9848021, 0.7686928], [1.1474025, 1.4699818, 1.1474024], [1.1065826, 1.4176859, 1.1065826]],\n [[2.1751494, 2.7866683, 2.1751497], [2.8685062, 3.6749542, 2.8685062], [2.5130394, 3.219552, 2.5130394]],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_smoothd.py_TEST_CASE_3_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_gaussian_smoothd.py_TEST_CASE_3_", "embedding": null, "metadata": {"file_path": "tests/test_rand_gaussian_smoothd.py", "file_name": "test_rand_gaussian_smoothd.py", "file_type": "text/x-python", "category": "test", "start_line": 41, "end_line": 64, "span_ids": ["impl:7", "TestRandGaussianSmoothd.test_value", "impl:5", "TestRandGaussianSmoothd"], "tokens": 363}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_3 = [\n {\"keys\": \"img\", \"sigma_x\": (0.5, 1.5), \"sigma_y\": (0.5, 1.0), \"approx\": \"scalespace\", \"prob\": 1.0},\n {\"img\": np.array([[[1, 1, 1], [2, 2, 2], [3, 3, 3]], [[4, 4, 4], [5, 5, 5], [6, 6, 6]]])},\n np.array(\n [\n [[0.8128456, 0.96736777, 0.8128456], [1.2742369, 1.5164697, 1.2742369], [1.2800367, 1.5233722, 1.2800368]],\n [[2.3825073, 2.8354228, 2.3825073], [3.1855922, 3.7911744, 3.1855922], [2.8496985, 3.391427, 2.8496985]],\n 
]\n ),\n]\n\n\nclass TestRandGaussianSmoothd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3])\n def test_value(self, argments, image, expected_data):\n converter = RandGaussianSmoothd(**argments)\n converter.set_random_state(seed=0)\n result = converter(image)\n np.testing.assert_allclose(result[\"img\"], expected_data, rtol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_histogram_shift.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_histogram_shift.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_rand_histogram_shift.py", "file_name": "test_rand_histogram_shift.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 49, "span_ids": ["TestRandHistogramShift", "impl:3", "TestRandHistogramShift.test_rand_histogram_shift", "docstring"], "tokens": 408}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandHistogramShift\n\nTEST_CASES = [\n [\n {\"num_control_points\": 5, \"prob\": 0.0},\n {\"img\": np.arange(8).reshape((1, 2, 2, 2))},\n np.arange(8).reshape((1, 2, 2, 2)),\n ],\n [\n {\"num_control_points\": 5, \"prob\": 0.9},\n {\"img\": np.arange(8).reshape((1, 2, 2, 2)).astype(np.float32)},\n np.array([[[[0.0, 0.57227867], [1.1391707, 1.68990281]], [[2.75833219, 4.34445884], [5.70913743, 7.0]]]]),\n ],\n [\n {\"num_control_points\": (5, 20), \"prob\": 0.9},\n {\"img\": np.arange(8).reshape((1, 2, 2, 2)).astype(np.float32)},\n np.array([[[[0.0, 1.17472492], [2.21553091, 2.88292011]], [[3.98407301, 5.01302123], [6.09275004, 7.0]]]]),\n ],\n]\n\n\nclass TestRandHistogramShift(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_rand_histogram_shift(self, input_param, input_data, expected_val):\n g = RandHistogramShift(**input_param)\n g.set_random_state(123)\n result = g(**input_data)\n np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_histogram_shiftd.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_histogram_shiftd.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_rand_histogram_shiftd.py", "file_name": "test_rand_histogram_shiftd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 45, "span_ids": ["docstring"], "tokens": 484}, 
"excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandHistogramShiftD\n\nTEST_CASES = [\n [\n {\"keys\": (\"img\",), \"num_control_points\": 5, \"prob\": 0.0},\n {\"img\": np.arange(8).reshape((1, 2, 2, 2)), \"seg\": np.ones(8).reshape((1, 2, 2, 2))},\n {\"img\": np.arange(8).reshape((1, 2, 2, 2)), \"seg\": np.ones(8).reshape((1, 2, 2, 2))},\n ],\n [\n {\"keys\": (\"img\",), \"num_control_points\": 5, \"prob\": 0.9},\n {\"img\": np.arange(8).reshape((1, 2, 2, 2)).astype(np.float32), \"seg\": np.ones(8).reshape((1, 2, 2, 2))},\n {\n \"img\": np.array(\n [[[[0.0, 0.57227867], [1.1391707, 1.68990281]], [[2.75833219, 4.34445884], [5.70913743, 7.0]]]]\n ),\n \"seg\": np.ones(8).reshape((1, 2, 2, 2)),\n },\n ],\n [\n {\"keys\": (\"img\",), \"num_control_points\": (5, 20), \"prob\": 0.9},\n {\"img\": np.arange(8).reshape((1, 2, 2, 2)).astype(np.float32), \"seg\": np.ones(8).reshape((1, 2, 2, 2))},\n {\n \"img\": np.array(\n [[[[0.0, 1.17472492], [2.21553091, 2.88292011]], [[3.98407301, 5.01302123], [6.09275004, 7.0]]]]\n ),\n \"seg\": np.ones(8).reshape((1, 2, 2, 2)),\n },\n ],\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_histogram_shiftd.py_TestRandHistogramShiftD_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_histogram_shiftd.py_TestRandHistogramShiftD_", "embedding": null, "metadata": {"file_path": "tests/test_rand_histogram_shiftd.py", "file_name": "test_rand_histogram_shiftd.py", "file_type": "text/x-python", "category": "test", "start_line": 48, "end_line": 62, "span_ids": ["impl:3", "TestRandHistogramShiftD.test_rand_histogram_shiftd", "TestRandHistogramShiftD"], "tokens": 130}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandHistogramShiftD(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_rand_histogram_shiftd(self, input_param, input_data, expected_val):\n g = RandHistogramShiftD(**input_param)\n g.set_random_state(123)\n res = g(input_data)\n for key in res:\n result = res[key]\n expected = expected_val[key] if isinstance(expected_val, dict) else expected_val\n np.testing.assert_allclose(result, expected, rtol=1e-4, atol=1e-4)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samples.py_unittest_TEST_CASE_1._": 
{"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samples.py_unittest_TEST_CASE_1._", "embedding": null, "metadata": {"file_path": "tests/test_rand_spatial_crop_samples.py", "file_name": "test_rand_spatial_crop_samples.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 42, "span_ids": ["docstring"], "tokens": 407}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandSpatialCropSamples\n\nTEST_CASE_1 = [\n {\"roi_size\": [3, 3, 3], \"num_samples\": 4, \"random_center\": True, \"random_size\": False},\n np.arange(192).reshape(3, 4, 4, 4),\n [(3, 3, 3, 3), (3, 3, 3, 3), (3, 3, 3, 3), (3, 3, 3, 3)],\n np.array(\n [\n [\n [[21, 22, 23], [25, 26, 27], [29, 30, 31]],\n [[37, 38, 39], [41, 42, 43], [45, 46, 47]],\n [[53, 54, 55], [57, 58, 59], [61, 62, 63]],\n ],\n [\n [[85, 86, 87], [89, 90, 91], [93, 94, 95]],\n [[101, 102, 103], [105, 106, 107], [109, 110, 111]],\n [[117, 118, 119], [121, 122, 123], [125, 126, 127]],\n ],\n [\n [[149, 150, 151], [153, 154, 155], [157, 158, 159]],\n [[165, 166, 167], [169, 170, 171], [173, 174, 175]],\n [[181, 182, 183], [185, 186, 187], [189, 190, 191]],\n ],\n ]\n ),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samples.py_TEST_CASE_2_TEST_CASE_2._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samples.py_TEST_CASE_2_TEST_CASE_2._", "embedding": null, "metadata": {"file_path": "tests/test_rand_spatial_crop_samples.py", "file_name": "test_rand_spatial_crop_samples.py", "file_type": "text/x-python", "category": "test", "start_line": 44, "end_line": 67, "span_ids": ["impl:3"], "tokens": 430}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_2 = [\n {\"roi_size\": [3, 3, 3], \"num_samples\": 8, \"random_center\": False, \"random_size\": True},\n np.arange(192).reshape(3, 4, 4, 4),\n [(3, 4, 4, 3), (3, 4, 3, 3), (3, 3, 4, 4), (3, 4, 4, 4), (3, 3, 3, 4), (3, 3, 3, 3), (3, 3, 3, 3), (3, 3, 3, 3)],\n np.array(\n [\n [\n [[21, 22, 23], [25, 26, 27], [29, 30, 31]],\n [[37, 38, 39], [41, 42, 43], [45, 46, 47]],\n [[53, 54, 55], [57, 58, 59], [61, 62, 63]],\n ],\n [\n [[85, 86, 87], [89, 90, 91], [93, 94, 95]],\n [[101, 102, 103], [105, 106, 107], [109, 110, 111]],\n [[117, 118, 119], [121, 122, 123], [125, 126, 127]],\n ],\n [\n [[149, 150, 151], [153, 154, 155], [157, 158, 159]],\n [[165, 166, 167], [169, 170, 171], [173, 174, 175]],\n [[181, 182, 183], [185, 186, 187], [189, 190, 191]],\n ],\n ]\n 
),\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samples.py_TestRandSpatialCropSamples_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samples.py_TestRandSpatialCropSamples_", "embedding": null, "metadata": {"file_path": "tests/test_rand_spatial_crop_samples.py", "file_name": "test_rand_spatial_crop_samples.py", "file_type": "text/x-python", "category": "test", "start_line": 70, "end_line": 85, "span_ids": ["impl:5", "TestRandSpatialCropSamples", "TestRandSpatialCropSamples.test_shape"], "tokens": 136}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandSpatialCropSamples(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2])\n def test_shape(self, input_param, input_data, expected_shape, expected_last_item):\n xform = RandSpatialCropSamples(**input_param)\n xform.set_random_state(1234)\n result = xform(input_data)\n\n np.testing.assert_equal(len(result), input_param[\"num_samples\"])\n for item, expected in zip(result, expected_shape):\n self.assertTupleEqual(item.shape, expected)\n np.testing.assert_allclose(result[-1], expected_last_item)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samplesd.py_unittest_TEST_CASE_1._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samplesd.py_unittest_TEST_CASE_1._", "embedding": null, "metadata": {"file_path": "tests/test_rand_spatial_crop_samplesd.py", "file_name": "test_rand_spatial_crop_samplesd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 39, "span_ids": ["docstring"], "tokens": 425}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import RandSpatialCropSamplesd\n\nTEST_CASE_1 = [\n {\"keys\": [\"img\", \"seg\"], \"num_samples\": 4, \"roi_size\": [2, 2, 2], \"random_center\": True},\n {\"img\": np.arange(81).reshape(3, 3, 3, 3), \"seg\": np.arange(81, 0, -1).reshape(3, 3, 3, 3)},\n [(3, 3, 3, 2), (3, 2, 2, 2), (3, 3, 3, 2), (3, 3, 2, 2)],\n {\n \"img\": np.array(\n [\n [[[0, 1], [3, 4]], [[9, 10], [12, 13]], [[18, 19], [21, 22]]],\n [[[27, 28], [30, 31]], [[36, 37], [39, 40]], [[45, 46], [48, 49]]],\n [[[54, 55], [57, 58]], 
[[63, 64], [66, 67]], [[72, 73], [75, 76]]],\n ]\n ),\n \"seg\": np.array(\n [\n [[[81, 80], [78, 77]], [[72, 71], [69, 68]], [[63, 62], [60, 59]]],\n [[[54, 53], [51, 50]], [[45, 44], [42, 41]], [[36, 35], [33, 32]]],\n [[[27, 26], [24, 23]], [[18, 17], [15, 14]], [[9, 8], [6, 5]]],\n ]\n ),\n },\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samplesd.py_TEST_CASE_2_TEST_CASE_2._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samplesd.py_TEST_CASE_2_TEST_CASE_2._", "embedding": null, "metadata": {"file_path": "tests/test_rand_spatial_crop_samplesd.py", "file_name": "test_rand_spatial_crop_samplesd.py", "file_type": "text/x-python", "category": "test", "start_line": 41, "end_line": 61, "span_ids": ["impl:3"], "tokens": 555}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "TEST_CASE_2 = [\n {\"keys\": [\"img\", \"seg\"], \"num_samples\": 8, \"roi_size\": [2, 2, 3], \"random_center\": False},\n {\"img\": np.arange(81).reshape(3, 3, 3, 3), \"seg\": np.arange(81, 0, -1).reshape(3, 3, 3, 3)},\n [(3, 3, 3, 3), (3, 2, 3, 3), (3, 2, 2, 3), (3, 2, 3, 3), (3, 3, 3, 3), (3, 3, 3, 3), (3, 2, 2, 3), (3, 3, 2, 3)],\n {\n \"img\": np.array(\n [\n [[[0, 1, 2], [3, 4, 5]], [[9, 10, 11], [12, 13, 14]], [[18, 19, 20], [21, 22, 23]]],\n [[[27, 28, 29], [30, 31, 32]], [[36, 37, 38], [39, 40, 41]], [[45, 46, 47], [48, 49, 50]]],\n [[[54, 55, 56], [57, 58, 59]], [[63, 64, 65], [66, 67, 68]], [[72, 73, 74], [75, 76, 77]]],\n ]\n ),\n \"seg\": np.array(\n [\n [[[81, 80, 79], [78, 77, 76]], [[72, 71, 70], [69, 68, 67]], [[63, 62, 61], [60, 59, 58]]],\n [[[54, 53, 52], [51, 50, 49]], [[45, 44, 43], [42, 41, 40]], [[36, 35, 34], [33, 32, 31]]],\n [[[27, 26, 25], [24, 23, 22]], [[18, 17, 16], [15, 14, 13]], [[9, 8, 7], [6, 5, 4]]],\n ]\n ),\n },\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samplesd.py_TestRandSpatialCropSamplesd_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_rand_spatial_crop_samplesd.py_TestRandSpatialCropSamplesd_", "embedding": null, "metadata": {"file_path": "tests/test_rand_spatial_crop_samplesd.py", "file_name": "test_rand_spatial_crop_samplesd.py", "file_type": "text/x-python", "category": "test", "start_line": 64, "end_line": 82, "span_ids": ["impl:5", "TestRandSpatialCropSamplesd", "TestRandSpatialCropSamplesd.test_shape"], "tokens": 193}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", 
"creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestRandSpatialCropSamplesd(unittest.TestCase):\n @parameterized.expand([TEST_CASE_1, TEST_CASE_2])\n def test_shape(self, input_param, input_data, expected_shape, expected_last):\n xform = RandSpatialCropSamplesd(**input_param)\n xform.set_random_state(1234)\n result = xform(input_data)\n for item, expected in zip(result, expected_shape):\n self.assertTupleEqual(item[\"img\"].shape, expected)\n self.assertTupleEqual(item[\"seg\"].shape, expected)\n for i, item in enumerate(result):\n self.assertEqual(item[\"img_meta_dict\"][\"patch_index\"], i)\n self.assertEqual(item[\"seg_meta_dict\"][\"patch_index\"], i)\n np.testing.assert_allclose(item[\"img\"], expected_last[\"img\"])\n np.testing.assert_allclose(item[\"seg\"], expected_last[\"seg\"])\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resize_with_pad_or_cropd.py_unittest_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_resize_with_pad_or_cropd.py_unittest_", "embedding": null, "metadata": {"file_path": "tests/test_resize_with_pad_or_cropd.py", "file_name": "test_resize_with_pad_or_cropd.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 58, "span_ids": ["TestResizeWithPadOrCropd.test_pad_shape", "impl:3", "TestResizeWithPadOrCropd", "docstring"], "tokens": 435}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nfrom parameterized import parameterized\n\nfrom monai.transforms import ResizeWithPadOrCropd\n\nTEST_CASES = [\n [\n {\"keys\": \"img\", \"spatial_size\": [15, 8, 8], \"mode\": \"constant\"},\n {\"img\": np.zeros((3, 8, 8, 4))},\n (3, 15, 8, 8),\n ],\n [\n {\"keys\": \"img\", \"spatial_size\": [15, 4, 8], \"mode\": \"constant\"},\n {\"img\": np.zeros((3, 8, 8, 4))},\n (3, 15, 4, 8),\n ],\n [\n {\"keys\": \"img\", \"spatial_size\": [15, 4, -1], \"mode\": \"constant\"},\n {\"img\": np.zeros((3, 8, 8, 4))},\n (3, 15, 4, 4),\n ],\n [\n {\"keys\": \"img\", \"spatial_size\": [15, 4, -1], \"mode\": \"reflect\"},\n {\"img\": np.zeros((3, 8, 8, 4))},\n (3, 15, 4, 4),\n ],\n [\n {\"keys\": \"img\", \"spatial_size\": [-1, -1, -1], \"mode\": \"reflect\"},\n {\"img\": np.zeros((3, 8, 8, 4))},\n (3, 8, 8, 4),\n ],\n]\n\n\nclass TestResizeWithPadOrCropd(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_pad_shape(self, input_param, input_data, expected_val):\n paddcroper = ResizeWithPadOrCropd(**input_param)\n result = paddcroper(input_data)\n np.testing.assert_allclose(result[\"img\"].shape, expected_val)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_senet.py_TestPretrainedSENET_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_senet.py_TestPretrainedSENET_", "embedding": null, "metadata": {"file_path": "tests/test_senet.py", "file_name": "test_senet.py", "file_type": "text/x-python", "category": "test", "start_line": 62, "end_line": 91, "span_ids": ["TestPretrainedSENET.test_senet_shape", "TestPretrainedSENET", "impl:24", "TestPretrainedSENET.test_pretrain_consistency"], "tokens": 336}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestPretrainedSENET(unittest.TestCase):\n @parameterized.expand([TEST_CASE_PRETRAINED_1])\n def test_senet_shape(self, model, input_param):\n net = test_pretrained_networks(model, input_param, device)\n input_data = torch.randn(3, 3, 64, 64).to(device)\n expected_shape = (3, 2)\n net = net.to(device)\n with eval_mode(net):\n result = net(input_data)\n self.assertEqual(result.shape, expected_shape)\n\n @parameterized.expand([TEST_CASE_PRETRAINED_1])\n @skipUnless(has_cadene_pretrain, \"Requires `pretrainedmodels` package.\")\n def test_pretrain_consistency(self, model, input_param):\n input_data = torch.randn(1, 3, 64, 64).to(device)\n net = test_pretrained_networks(model, input_param, device)\n with eval_mode(net):\n result = net.features(input_data)\n cadene_net = pretrainedmodels.se_resnet50().to(device)\n with eval_mode(cadene_net):\n expected_result = cadene_net.features(input_data)\n # The difference between Cadene's senet and our version is that\n # we use nn.Linear as the FC layer, but Cadene's version uses\n # a conv layer with kernel size equals to 1. 
It may bring a little difference.\n self.assertTrue(torch.allclose(result, expected_result, rtol=1e-5, atol=1e-5))\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_unittest_TEST_CASES._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_unittest_TEST_CASES._", "embedding": null, "metadata": {"file_path": "tests/test_sliding_window_inference.py", "file_name": "test_sliding_window_inference.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 46, "span_ids": ["docstring"], "tokens": 927}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.inferers import SlidingWindowInferer, sliding_window_inference\nfrom tests.utils import skip_if_no_cuda\n\nTEST_CASES = [\n [(2, 3, 16), (4,), 3, 0.25, \"constant\", torch.device(\"cpu:0\")], # 1D small roi\n [(2, 3, 16, 15, 7, 9), 4, 3, 0.25, \"constant\", torch.device(\"cpu:0\")], # 4D small roi\n [(1, 3, 16, 15, 7), (4, -1, 7), 3, 0.25, \"constant\", torch.device(\"cpu:0\")], # 3D small roi\n [(2, 3, 16, 15, 7), (4, -1, 7), 3, 0.25, \"constant\", torch.device(\"cpu:0\")], # 3D small roi\n [(3, 3, 16, 15, 7), (4, -1, 7), 3, 0.25, \"constant\", torch.device(\"cpu:0\")], # 3D small roi\n [(2, 3, 16, 15, 7), (4, -1, 7), 3, 0.25, \"constant\", torch.device(\"cpu:0\")], # 3D small roi\n [(1, 3, 16, 15, 7), (4, 10, 7), 3, 0.25, \"constant\", torch.device(\"cpu:0\")], # 3D small roi\n [(1, 3, 16, 15, 7), (20, 22, 23), 10, 0.25, \"constant\", torch.device(\"cpu:0\")], # 3D large roi\n [(2, 3, 15, 7), (2, 6), 1000, 0.25, \"constant\", torch.device(\"cpu:0\")], # 2D small roi, large batch\n [(1, 3, 16, 7), (80, 50), 7, 0.25, \"constant\", torch.device(\"cpu:0\")], # 2D large roi\n [(1, 3, 16, 15, 7), (20, 22, 23), 10, 0.5, \"constant\", torch.device(\"cpu:0\")], # 3D large overlap\n [(1, 3, 16, 7), (80, 50), 7, 0.5, \"gaussian\", torch.device(\"cpu:0\")], # 2D large overlap, gaussian\n [(1, 3, 16, 15, 7), (4, 10, 7), 3, 0.25, \"gaussian\", torch.device(\"cpu:0\")], # 3D small roi, gaussian\n [(3, 3, 16, 15, 7), (4, 10, 7), 3, 0.25, \"gaussian\", torch.device(\"cpu:0\")], # 3D small roi, gaussian\n [\n (1, 3, 16, 15, 7),\n (4, 10, 7),\n 3,\n 0.25,\n \"gaussian\",\n torch.device(\"cuda:0\"),\n ], # test inference on gpu if availabe\n [(1, 3, 16, 15, 7), (4, 1, 7), 3, 0.25, \"constant\", torch.device(\"cpu:0\")], # 3D small roi\n [(5, 3, 16, 15, 7), (4, 1, 7), 3, 0.25, \"constant\", torch.device(\"cpu:0\")], # 3D small roi\n]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference_TestSlidingWindowInference.test_sliding_window_default.None_3": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference_TestSlidingWindowInference.test_sliding_window_default.None_3", "embedding": null, "metadata": {"file_path": "tests/test_sliding_window_inference.py", "file_name": "test_sliding_window_inference.py", "file_type": "text/x-python", "category": "test", "start_line": 48, "end_line": 72, "span_ids": ["TestSlidingWindowInference", "TestSlidingWindowInference.test_sliding_window_default"], "tokens": 292}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSlidingWindowInference(unittest.TestCase):\n @parameterized.expand(TEST_CASES)\n def test_sliding_window_default(self, image_shape, roi_shape, sw_batch_size, overlap, mode, device):\n n_total = np.prod(image_shape)\n if mode == \"constant\":\n inputs = torch.arange(n_total, dtype=torch.float).reshape(*image_shape)\n else:\n inputs = torch.ones(*image_shape, dtype=torch.float)\n if device.type == \"cuda\" and not torch.cuda.is_available():\n device = torch.device(\"cpu:0\")\n\n def compute(data):\n return data + 1\n\n if mode == \"constant\":\n expected_val = np.arange(n_total, dtype=np.float32).reshape(*image_shape) + 1.0\n else:\n expected_val = np.ones(image_shape, dtype=np.float32) + 1.0\n result = sliding_window_inference(inputs.to(device), roi_shape, sw_batch_size, compute, overlap, mode=mode)\n np.testing.assert_string_equal(device.type, result.device.type)\n np.testing.assert_allclose(result.cpu().numpy(), expected_val)\n\n result = SlidingWindowInferer(roi_shape, sw_batch_size, overlap, mode)(inputs.to(device), compute)\n np.testing.assert_string_equal(device.type, result.device.type)\n np.testing.assert_allclose(result.cpu().numpy(), expected_val)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference.test_default_device_TestSlidingWindowInference.test_default_device.np_testing_assert_allclos": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference.test_default_device_TestSlidingWindowInference.test_default_device.np_testing_assert_allclos", "embedding": null, "metadata": {"file_path": "tests/test_sliding_window_inference.py", "file_name": "test_sliding_window_inference.py", "file_type": "text/x-python", "category": "test", "start_line": 74, "end_line": 86, "span_ids": ["TestSlidingWindowInference.test_default_device"], "tokens": 168}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": 
["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSlidingWindowInference(unittest.TestCase):\n\n def test_default_device(self):\n device = \"cuda\" if torch.cuda.is_available() else \"cpu:0\"\n inputs = torch.ones((1, 3, 16, 15, 7)).to(device=device)\n roi_shape = (4, 10, 7)\n sw_batch_size = 10\n\n def compute(data):\n return data + 1\n\n result = sliding_window_inference(inputs, roi_shape, sw_batch_size, compute)\n np.testing.assert_string_equal(inputs.device.type, result.device.type)\n expected_val = np.ones((1, 3, 16, 15, 7), dtype=np.float32) + 1\n np.testing.assert_allclose(result.cpu().numpy(), expected_val)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference.test_sigma_TestSlidingWindowInference.test_sigma.result_6.sliding_window_inference_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference.test_sigma_TestSlidingWindowInference.test_sigma.result_6.sliding_window_inference_", "embedding": null, "metadata": {"file_path": "tests/test_sliding_window_inference.py", "file_name": "test_sliding_window_inference.py", "file_type": "text/x-python", "category": "test", "start_line": 88, "end_line": 139, "span_ids": ["TestSlidingWindowInference.test_sigma"], "tokens": 573}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSlidingWindowInference(unittest.TestCase):\n\n def test_sigma(self):\n device = \"cuda\" if torch.cuda.is_available() else \"cpu:0\"\n inputs = torch.ones((1, 1, 7, 7)).to(device=device)\n roi_shape = (3, 3)\n sw_batch_size = 10\n\n class _Pred:\n add = 1\n\n def compute(self, data):\n self.add += 1\n return data + self.add\n\n result = sliding_window_inference(\n inputs,\n roi_shape,\n sw_batch_size,\n _Pred().compute,\n overlap=0.5,\n padding_mode=\"constant\",\n cval=-1,\n mode=\"constant\",\n sigma_scale=1.0,\n )\n\n expected = np.array(\n [\n [\n [\n [3.0000, 3.0000, 3.0000, 3.0000, 3.0000, 3.0000, 3.0000],\n [3.0000, 3.0000, 3.0000, 3.0000, 3.0000, 3.0000, 3.0000],\n [3.3333, 3.3333, 3.3333, 3.3333, 3.3333, 3.3333, 3.3333],\n [3.6667, 3.6667, 3.6667, 3.6667, 3.6667, 3.6667, 3.6667],\n [4.3333, 4.3333, 4.3333, 4.3333, 4.3333, 4.3333, 4.3333],\n [4.5000, 4.5000, 4.5000, 4.5000, 4.5000, 4.5000, 4.5000],\n [5.0000, 5.0000, 5.0000, 5.0000, 5.0000, 5.0000, 5.0000],\n ]\n ]\n ]\n )\n np.testing.assert_allclose(result.cpu().numpy(), expected, rtol=1e-4)\n result = sliding_window_inference(\n inputs,\n roi_shape,\n sw_batch_size,\n _Pred().compute,\n overlap=0.5,\n padding_mode=\"constant\",\n cval=-1,\n mode=\"gaussian\",\n sigma_scale=1.0,\n )\n # ... 
other code", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference.test_sigma.expected_7_TestSlidingWindowInference.test_sigma.None_3": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_sliding_window_inference.py_TestSlidingWindowInference.test_sigma.expected_7_TestSlidingWindowInference.test_sigma.None_3", "embedding": null, "metadata": {"file_path": "tests/test_sliding_window_inference.py", "file_name": "test_sliding_window_inference.py", "file_type": "text/x-python", "category": "test", "start_line": 140, "end_line": 165, "span_ids": ["TestSlidingWindowInference.test_sigma"], "tokens": 506}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestSlidingWindowInference(unittest.TestCase):\n\n def test_sigma(self):\n # ... other code\n expected = np.array(\n [\n [\n [\n [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0],\n [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0],\n [3.3271625, 3.3271623, 3.3271623, 3.3271623, 3.3271623, 3.3271623, 3.3271625],\n [3.6728377, 3.6728377, 3.6728377, 3.6728377, 3.6728377, 3.6728377, 3.6728377],\n [4.3271623, 4.3271623, 4.3271627, 4.3271627, 4.3271627, 4.3271623, 4.3271623],\n [4.513757, 4.513757, 4.513757, 4.513757, 4.513757, 4.513757, 4.513757],\n [4.9999995, 5.0, 5.0, 5.0, 5.0, 5.0, 4.9999995],\n ]\n ]\n ]\n )\n np.testing.assert_allclose(result.cpu().numpy(), expected, rtol=1e-4)\n\n result = SlidingWindowInferer(roi_shape, sw_batch_size, overlap=0.5, mode=\"gaussian\", sigma_scale=1.0)(\n inputs, _Pred().compute\n )\n np.testing.assert_allclose(result.cpu().numpy(), expected, rtol=1e-4)\n\n result = SlidingWindowInferer(roi_shape, sw_batch_size, overlap=0.5, mode=\"gaussian\", sigma_scale=[1.0, 1.0])(\n inputs, _Pred().compute\n )\n np.testing.assert_allclose(result.cpu().numpy(), expected, rtol=1e-4)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_surface_distance.py_unittest_create_spherical_seg_3d.return.image": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_surface_distance.py_unittest_create_spherical_seg_3d.return.image", "embedding": null, "metadata": {"file_path": "tests/test_surface_distance.py", "file_name": "test_surface_distance.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 48, "span_ids": ["create_spherical_seg_3d", "docstring"], "tokens": 318}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", 
"last_accessed_date"], "relationships": {}, "text": "import unittest\nfrom typing import Tuple\n\nimport numpy as np\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.metrics import SurfaceDistanceMetric\n\n\ndef create_spherical_seg_3d(\n radius: float = 20.0,\n centre: Tuple[int, int, int] = (49, 49, 49),\n im_shape: Tuple[int, int, int] = (99, 99, 99),\n) -> np.ndarray:\n \"\"\"\n Return a 3D image with a sphere inside. Voxel values will be\n 1 inside the sphere, and 0 elsewhere.\n\n Args:\n radius: radius of sphere (in terms of number of voxels, can be partial)\n centre: location of sphere centre.\n im_shape: shape of image to create\n\n See also:\n :py:meth:`~create_test_image_3d`\n \"\"\"\n # Create image\n image = np.zeros(im_shape, dtype=np.int32)\n spy, spx, spz = np.ogrid[\n -centre[0] : im_shape[0] - centre[0], -centre[1] : im_shape[1] - centre[1], -centre[2] : im_shape[2] - centre[2]\n ]\n circle = (spx * spx + spy * spy + spz * spz) <= radius * radius\n\n image[circle] = 1\n image[~circle] = 0\n return image", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_upsample_block.py_unittest_TEST_CASES_EQ._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_upsample_block.py_unittest_TEST_CASES_EQ._", "embedding": null, "metadata": {"file_path": "tests/test_upsample_block.py", "file_name": "test_upsample_block.py", "file_type": "text/x-python", "category": "test", "start_line": 12, "end_line": 66, "span_ids": ["impl:3", "docstring"], "tokens": 660}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import unittest\n\nimport torch\nfrom parameterized import parameterized\n\nfrom monai.networks import eval_mode\nfrom monai.networks.blocks import UpSample\nfrom monai.utils import UpsampleMode\n\nTEST_CASES = [\n [{\"dimensions\": 2, \"in_channels\": 4}, (7, 4, 32, 48), (7, 4, 64, 96)], # 4-channel 2D, batch 7\n [\n {\"dimensions\": 1, \"in_channels\": 4, \"out_channels\": 3},\n (16, 4, 63),\n (16, 3, 126),\n ], # 4-channel 1D, batch 16\n [\n {\"dimensions\": 1, \"in_channels\": 4, \"out_channels\": 8, \"mode\": \"deconv\", \"align_corners\": False},\n (16, 4, 20),\n (16, 8, 40),\n ], # 4-channel 1D, batch 16\n [\n {\"dimensions\": 3, \"in_channels\": 4, \"mode\": \"nontrainable\"},\n (16, 4, 32, 24, 48),\n (16, 4, 64, 48, 96),\n ], # 4-channel 3D, batch 16\n [\n {\"dimensions\": 3, \"in_channels\": 1, \"mode\": \"deconv\", \"scale_factor\": 3, \"align_corners\": False},\n (16, 1, 10, 15, 20),\n (16, 1, 30, 45, 60),\n ], # 1-channel 3D, batch 16\n [\n {\"dimensions\": 3, \"in_channels\": 1, \"mode\": \"pixelshuffle\", \"scale_factor\": 2, \"align_corners\": False},\n (16, 1, 10, 15, 20),\n (16, 1, 20, 30, 40),\n ], # 1-channel 3D, batch 16\n [\n {\"dimensions\": 2, \"in_channels\": 4, \"mode\": \"pixelshuffle\", \"scale_factor\": 2},\n (16, 4, 10, 15),\n (16, 4, 20, 30),\n ], # 4-channel 2D, batch 16\n [\n {\n \"dimensions\": 3,\n \"mode\": \"pixelshuffle\",\n 
\"scale_factor\": 2,\n \"align_corners\": False,\n \"pre_conv\": torch.nn.Conv3d(in_channels=1, out_channels=24, kernel_size=3, stride=1, padding=1),\n },\n (16, 1, 10, 15, 20),\n (16, 3, 20, 30, 40),\n ], # 1-channel 3D, batch 16, pre_conv\n]\n\nTEST_CASES_EQ = []", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_upsample_block.py_for_s_in_range_1_5__": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_upsample_block.py_for_s_in_range_1_5__", "embedding": null, "metadata": {"file_path": "tests/test_upsample_block.py", "file_name": "test_upsample_block.py", "file_type": "text/x-python", "category": "test", "start_line": 67, "end_line": 96, "span_ids": ["impl:3", "TestUpsample", "impl:13", "TestUpsample.test_shape"], "tokens": 210}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "for s in range(1, 5):\n expected_shape = (16, 5, 4 * s, 5 * s, 6 * s)\n for t in UpsampleMode:\n test_case = [\n {\n \"dimensions\": 3,\n \"in_channels\": 3,\n \"out_channels\": 5,\n \"mode\": t,\n \"scale_factor\": s,\n \"align_corners\": True,\n },\n (16, 3, 4, 5, 6),\n ]\n test_case.append(expected_shape)\n TEST_CASES_EQ.append(test_case)\n\n\nclass TestUpsample(unittest.TestCase):\n @parameterized.expand(TEST_CASES + TEST_CASES_EQ)\n def test_shape(self, input_param, input_shape, expected_shape):\n net = UpSample(**input_param)\n with eval_mode(net):\n result = net(torch.randn(input_shape))\n self.assertEqual(result.shape, expected_shape)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom.py_TestZoom.test_keep_size_TestZoom.test_invalid_inputs.with_self_assertRaises_ra.zoom_fn_self_imt_0_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom.py_TestZoom.test_keep_size_TestZoom.test_invalid_inputs.with_self_assertRaises_ra.zoom_fn_self_imt_0_", "embedding": null, "metadata": {"file_path": "tests/test_zoom.py", "file_name": "test_zoom.py", "file_type": "text/x-python", "category": "test", "start_line": 40, "end_line": 53, "span_ids": ["TestZoom.test_keep_size", "TestZoom.test_invalid_inputs"], "tokens": 183}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestZoom(NumpyImageTestCase2D):\n\n def test_keep_size(self):\n zoom_fn = Zoom(zoom=[0.6, 0.6], keep_size=True, align_corners=True)\n zoomed = zoom_fn(self.imt[0], mode=\"bilinear\")\n 
np.testing.assert_allclose(zoomed.shape, self.imt.shape[1:])\n\n zoom_fn = Zoom(zoom=[1.3, 1.3], keep_size=True)\n zoomed = zoom_fn(self.imt[0])\n np.testing.assert_allclose(zoomed.shape, self.imt.shape[1:])\n\n @parameterized.expand(INVALID_CASES)\n def test_invalid_inputs(self, zoom, mode, raises):\n with self.assertRaises(raises):\n zoom_fn = Zoom(zoom=zoom, mode=mode)\n zoom_fn(self.imt[0])", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom.py_TestZoom.test_padding_mode_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/tests/test_zoom.py_TestZoom.test_padding_mode_", "embedding": null, "metadata": {"file_path": "tests/test_zoom.py", "file_name": "test_zoom.py", "file_type": "text/x-python", "category": "test", "start_line": 55, "end_line": 65, "span_ids": ["TestZoom.test_padding_mode", "impl:5"], "tokens": 250}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class TestZoom(NumpyImageTestCase2D):\n\n def test_padding_mode(self):\n zoom_fn = Zoom(zoom=0.5, mode=\"nearest\", padding_mode=\"constant\", keep_size=True)\n test_data = np.array([[[1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0]]])\n zoomed = zoom_fn(test_data)\n expected = np.array([[[0.0, 0.0, 0.0, 0.0], [0.0, 1.0, 1.0, 0.0], [0.0, 1.0, 1.0, 0.0], [0.0, 0.0, 0.0, 0.0]]])\n np.testing.assert_allclose(zoomed, expected)\n\n\nif __name__ == \"__main__\":\n unittest.main()", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_pep440_pre_render_pep440_pre.return.rendered": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_pep440_pre_render_pep440_pre.return.rendered", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 345, "end_line": 358, "span_ids": ["render_pep440_pre"], "tokens": 107}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def render_pep440_pre(pieces):\n \"\"\"TAG[.post0.devDISTANCE] -- No -dirty.\n\n Exceptions:\n 1: no tags. 
0.post0.devDISTANCE\n \"\"\"\n if pieces[\"closest-tag\"]:\n rendered = pieces[\"closest-tag\"]\n if pieces[\"distance\"]:\n rendered += \".post0.dev%d\" % pieces[\"distance\"]\n else:\n # exception #1\n rendered = \"0.post0.dev%d\" % pieces[\"distance\"]\n return rendered", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_pep440_post_render_pep440_post.return.rendered": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/_version.py_render_pep440_post_render_pep440_post.return.rendered", "embedding": null, "metadata": {"file_path": "monai/_version.py", "file_name": "_version.py", "file_type": "text/x-python", "category": "implementation", "start_line": 361, "end_line": 385, "span_ids": ["render_pep440_post"], "tokens": 217}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "def render_pep440_post(pieces):\n \"\"\"TAG[.postDISTANCE[.dev0]+gHEX] .\n\n The \".dev0\" means dirty. Note that .dev0 sorts backwards\n (a dirty tree will appear \"older\" than the corresponding clean one),\n but you shouldn't be releasing software with -dirty anyways.\n\n Exceptions:\n 1: no tags. 0.postDISTANCE[.dev0]\n \"\"\"\n if pieces[\"closest-tag\"]:\n rendered = pieces[\"closest-tag\"]\n if pieces[\"distance\"] or pieces[\"dirty\"]:\n rendered += \".post%d\" % pieces[\"distance\"]\n if pieces[\"dirty\"]:\n rendered += \".dev0\"\n rendered += plus_or_dot(pieces)\n rendered += \"g%s\" % pieces[\"short\"]\n else:\n # exception #1\n rendered = \"0.post%d\" % pieces[\"distance\"]\n if pieces[\"dirty\"]:\n rendered += \".dev0\"\n rendered += \"+g%s\" % pieces[\"short\"]\n return rendered", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/__init__.py_CrossValidation_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/__init__.py_CrossValidation_", "embedding": null, "metadata": {"file_path": "monai/apps/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 14, "span_ids": ["docstring"], "tokens": 32}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "from .datasets import CrossValidation, DecathlonDataset, MedNISTDataset\nfrom .utils import check_hash, download_and_extract, download_url, extractall", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: 
{value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_os___all__._MedNISTDataset_Decat": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_os___all__._MedNISTDataset_Decat", "embedding": null, "metadata": {"file_path": "monai/apps/datasets.py", "file_name": "datasets.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 29, "span_ids": ["docstring"], "tokens": 111}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import os\nimport sys\nfrom typing import Callable, Dict, List, Optional, Sequence, Union\n\nimport numpy as np\n\nfrom monai.apps.utils import download_and_extract\nfrom monai.data import (\n CacheDataset,\n load_decathlon_datalist,\n load_decathlon_properties,\n partition_dataset,\n select_cross_validation_folds,\n)\nfrom monai.transforms import LoadImaged, Randomizable\nfrom monai.utils import ensure_tuple\n\n__all__ = [\"MedNISTDataset\", \"DecathlonDataset\", \"CrossValidation\"]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_MedNISTDataset_MedNISTDataset.randomize.self_R_shuffle_data_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_MedNISTDataset_MedNISTDataset.randomize.self_R_shuffle_data_", "embedding": null, "metadata": {"file_path": "monai/apps/datasets.py", "file_name": "datasets.py", "file_type": "text/x-python", "category": "implementation", "start_line": 32, "end_line": 102, "span_ids": ["MedNISTDataset.__init__", "MedNISTDataset", "MedNISTDataset.randomize"], "tokens": 774}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MedNISTDataset(Randomizable, CacheDataset):\n \"\"\"\n The Dataset to automatically download MedNIST data and generate items for training, validation or test.\n It's based on `CacheDataset` to accelerate the training process.\n\n Args:\n root_dir: target directory to download and load MedNIST dataset.\n section: expected data section, can be: `training`, `validation` or `test`.\n transform: transforms to execute operations on input data.\n download: whether to download and extract the MedNIST from resource link, default is False.\n if expected file already exists, skip downloading even set it to True.\n user can manually copy `MedNIST.tar.gz` file or `MedNIST` folder to root directory.\n seed: random seed to randomly split training, validation and test datasets, default is 0.\n val_frac: percentage of of validation fraction in the whole dataset, default is 0.1.\n 
test_frac: percentage of of test fraction in the whole dataset, default is 0.1.\n cache_num: number of items to be cached. Default is `sys.maxsize`.\n will take the minimum of (cache_num, data_length x cache_rate, data_length).\n cache_rate: percentage of cached data in total, default is 1.0 (cache all).\n will take the minimum of (cache_num, data_length x cache_rate, data_length).\n num_workers: the number of worker threads to use.\n if 0 a single thread will be used. Default is 0.\n\n Raises:\n ValueError: When ``root_dir`` is not a directory.\n RuntimeError: When ``dataset_dir`` doesn't exist and downloading is not selected (``download=False``).\n\n \"\"\"\n\n resource = \"https://www.dropbox.com/s/5wwskxctvcxiuea/MedNIST.tar.gz?dl=1\"\n md5 = \"0bc7306e7427e00ad1c5526a6677552d\"\n compressed_file_name = \"MedNIST.tar.gz\"\n dataset_folder_name = \"MedNIST\"\n\n def __init__(\n self,\n root_dir: str,\n section: str,\n transform: Union[Sequence[Callable], Callable] = (),\n download: bool = False,\n seed: int = 0,\n val_frac: float = 0.1,\n test_frac: float = 0.1,\n cache_num: int = sys.maxsize,\n cache_rate: float = 1.0,\n num_workers: int = 0,\n ) -> None:\n if not os.path.isdir(root_dir):\n raise ValueError(\"Root directory root_dir must be a directory.\")\n self.section = section\n self.val_frac = val_frac\n self.test_frac = test_frac\n self.set_random_state(seed=seed)\n tarfile_name = os.path.join(root_dir, self.compressed_file_name)\n dataset_dir = os.path.join(root_dir, self.dataset_folder_name)\n self.num_class = 0\n if download:\n download_and_extract(self.resource, tarfile_name, root_dir, self.md5)\n\n if not os.path.exists(dataset_dir):\n raise RuntimeError(\n f\"Cannot find dataset directory: {dataset_dir}, please use download=True to download it.\"\n )\n data = self._generate_data_list(dataset_dir)\n if transform == ():\n transform = LoadImaged(\"image\")\n CacheDataset.__init__(\n self, data, transform, cache_num=cache_num, cache_rate=cache_rate, num_workers=num_workers\n )\n\n def randomize(self, data: List[int]) -> None:\n self.R.shuffle(data)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_MedNISTDataset.get_num_classes_MedNISTDataset._generate_data_list.return.data": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_MedNISTDataset.get_num_classes_MedNISTDataset._generate_data_list.return.data", "embedding": null, "metadata": {"file_path": "monai/apps/datasets.py", "file_name": "datasets.py", "file_type": "text/x-python", "category": "implementation", "start_line": 104, "end_line": 155, "span_ids": ["MedNISTDataset.get_num_classes", "MedNISTDataset._generate_data_list"], "tokens": 425}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class MedNISTDataset(Randomizable, CacheDataset):\n\n def get_num_classes(self) -> int:\n \"\"\"Get number of classes.\"\"\"\n return self.num_class\n\n def _generate_data_list(self, dataset_dir: str) -> 
List[Dict]:\n \"\"\"\n Raises:\n ValueError: When ``section`` is not one of [\"training\", \"validation\", \"test\"].\n\n \"\"\"\n class_names = sorted((x for x in os.listdir(dataset_dir) if os.path.isdir(os.path.join(dataset_dir, x))))\n self.num_class = len(class_names)\n image_files = [\n [\n os.path.join(dataset_dir, class_names[i], x)\n for x in os.listdir(os.path.join(dataset_dir, class_names[i]))\n ]\n for i in range(self.num_class)\n ]\n num_each = [len(image_files[i]) for i in range(self.num_class)]\n image_files_list = []\n image_class = []\n class_name = []\n for i in range(self.num_class):\n image_files_list.extend(image_files[i])\n image_class.extend([i] * num_each[i])\n class_name.extend([class_names[i]] * num_each[i])\n num_total = len(image_class)\n\n data = []\n\n length = len(image_files_list)\n indices = np.arange(length)\n self.randomize(indices)\n\n test_length = int(length * self.test_frac)\n val_length = int(length * self.val_frac)\n if self.section == \"test\":\n section_indices = indices[:test_length]\n elif self.section == \"validation\":\n section_indices = indices[test_length : test_length + val_length]\n elif self.section == \"training\":\n section_indices = indices[test_length + val_length :]\n else:\n raise ValueError(\n f'Unsupported section: {self.section}, available options are [\"training\", \"validation\", \"test\"].'\n )\n\n for i in section_indices:\n data.append({\"image\": image_files_list[i], \"label\": image_class[i], \"class_name\": class_name[i]})\n\n return data", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_DecathlonDataset.__init___DecathlonDataset._split_datalist.return._datalist_i_for_i_in_sel": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_DecathlonDataset.__init___DecathlonDataset._split_datalist.return._datalist_i_for_i_in_sel", "embedding": null, "metadata": {"file_path": "monai/apps/datasets.py", "file_name": "datasets.py", "file_type": "text/x-python", "category": "implementation", "start_line": 238, "end_line": 328, "span_ids": ["DecathlonDataset._split_datalist", "DecathlonDataset.get_properties", "DecathlonDataset.__init__", "DecathlonDataset._generate_data_list", "DecathlonDataset.randomize", "DecathlonDataset.get_indices"], "tokens": 748}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class DecathlonDataset(Randomizable, CacheDataset):\n\n def __init__(\n self,\n root_dir: str,\n task: str,\n section: str,\n transform: Union[Sequence[Callable], Callable] = (),\n download: bool = False,\n seed: int = 0,\n val_frac: float = 0.2,\n cache_num: int = sys.maxsize,\n cache_rate: float = 1.0,\n num_workers: int = 0,\n ) -> None:\n if not os.path.isdir(root_dir):\n raise ValueError(\"Root directory root_dir must be a directory.\")\n self.section = section\n self.val_frac = val_frac\n self.set_random_state(seed=seed)\n if task not in self.resource:\n raise ValueError(f\"Unsupported task: {task}, 
available options are: {list(self.resource.keys())}.\")\n dataset_dir = os.path.join(root_dir, task)\n tarfile_name = f\"{dataset_dir}.tar\"\n if download:\n download_and_extract(self.resource[task], tarfile_name, root_dir, self.md5[task])\n\n if not os.path.exists(dataset_dir):\n raise RuntimeError(\n f\"Cannot find dataset directory: {dataset_dir}, please use download=True to download it.\"\n )\n self.indices: np.ndarray = np.array([])\n data = self._generate_data_list(dataset_dir)\n # as `release` key has typo in Task04 config file, ignore it.\n property_keys = [\n \"name\",\n \"description\",\n \"reference\",\n \"licence\",\n \"tensorImageSize\",\n \"modality\",\n \"labels\",\n \"numTraining\",\n \"numTest\",\n ]\n self._properties = load_decathlon_properties(os.path.join(dataset_dir, \"dataset.json\"), property_keys)\n if transform == ():\n transform = LoadImaged([\"image\", \"label\"])\n CacheDataset.__init__(\n self, data, transform, cache_num=cache_num, cache_rate=cache_rate, num_workers=num_workers\n )\n\n def get_indices(self) -> np.ndarray:\n \"\"\"\n Get the indices of datalist used in this dataset.\n\n \"\"\"\n return self.indices\n\n def randomize(self, data: List[int]) -> None:\n self.R.shuffle(data)\n\n def get_properties(self, keys: Optional[Union[Sequence[str], str]] = None):\n \"\"\"\n Get the loaded properties of dataset with specified keys.\n If no keys specified, return all the loaded properties.\n\n \"\"\"\n if keys is None:\n return self._properties\n if self._properties is not None:\n return {key: self._properties[key] for key in ensure_tuple(keys)}\n return {}\n\n def _generate_data_list(self, dataset_dir: str) -> List[Dict]:\n section = \"training\" if self.section in [\"training\", \"validation\"] else \"test\"\n datalist = load_decathlon_datalist(os.path.join(dataset_dir, \"dataset.json\"), True, section)\n return self._split_datalist(datalist)\n\n def _split_datalist(self, datalist: List[Dict]) -> List[Dict]:\n if self.section == \"test\":\n return datalist\n length = len(datalist)\n indices = np.arange(length)\n self.randomize(indices)\n\n val_length = int(length * self.val_frac)\n if self.section == \"training\":\n self.indices = indices[val_length:]\n else:\n self.indices = indices[:val_length]\n\n return [datalist[i] for i in self.indices]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_CrossValidation_CrossValidation.__init__.self.dataset_params.dataset_params": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_CrossValidation_CrossValidation.__init__.self.dataset_params.dataset_params", "embedding": null, "metadata": {"file_path": "monai/apps/datasets.py", "file_name": "datasets.py", "file_type": "text/x-python", "category": "implementation", "start_line": 331, "end_line": 380, "span_ids": ["CrossValidation.__init__", "CrossValidation"], "tokens": 383}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class 
CrossValidation:\n \"\"\"\n Cross validation dataset based on the general dataset which must have `_split_datalist` API.\n\n Args:\n dataset_cls: dataset class to be used to create the cross validation partitions.\n It must have `_split_datalist` API.\n nfolds: number of folds to split the data for cross validation.\n seed: random seed to randomly shuffle the datalist before splitting into N folds, default is 0.\n dataset_params: other additional parameters for the dataset_cls base class.\n\n Example of 5 folds cross validation training::\n\n cvdataset = CrossValidation(\n dataset_cls=DecathlonDataset,\n nfolds=5,\n seed=12345,\n root_dir=\"./\",\n task=\"Task09_Spleen\",\n section=\"training\",\n download=True,\n )\n dataset_fold0_train = cvdataset.get_dataset(folds=[1, 2, 3, 4])\n dataset_fold0_val = cvdataset.get_dataset(folds=0)\n # execute training for fold 0 ...\n\n dataset_fold1_train = cvdataset.get_dataset(folds=[1])\n dataset_fold1_val = cvdataset.get_dataset(folds=[0, 2, 3, 4])\n # execute training for fold 1 ...\n\n ...\n\n dataset_fold4_train = ...\n # execute training for fold 4 ...\n\n \"\"\"\n\n def __init__(\n self,\n dataset_cls,\n nfolds: int = 5,\n seed: int = 0,\n **dataset_params,\n ) -> None:\n if not hasattr(dataset_cls, \"_split_datalist\"):\n raise ValueError(\"dataset class must have _split_datalist API.\")\n self.dataset_cls = dataset_cls\n self.nfolds = nfolds\n self.seed = seed\n self.dataset_params = dataset_params", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_CrossValidation.get_dataset_": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/datasets.py_CrossValidation.get_dataset_", "embedding": null, "metadata": {"file_path": "monai/apps/datasets.py", "file_name": "datasets.py", "file_type": "text/x-python", "category": "implementation", "start_line": 382, "end_line": 399, "span_ids": ["CrossValidation.get_dataset"], "tokens": 163}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "class CrossValidation:\n\n def get_dataset(self, folds: Union[Sequence[int], int]):\n \"\"\"\n Generate dataset based on the specified fold indice in the cross validation group.\n\n Args:\n folds: index of folds for training or validation, if a list of values, concatenate the data.\n\n \"\"\"\n nfolds = self.nfolds\n seed = self.seed\n\n class _NsplitsDataset(self.dataset_cls): # type: ignore\n def _split_datalist(self, datalist: List[Dict]) -> List[Dict]:\n data = partition_dataset(data=datalist, num_partitions=nfolds, shuffle=True, seed=seed)\n return select_cross_validation_folds(partitions=data, folds=folds)\n\n return _NsplitsDataset(**self.dataset_params)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, 
"/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/__init__.py__": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/__init__.py__", "embedding": null, "metadata": {"file_path": "monai/apps/deepgrow/__init__.py", "file_name": "__init__.py", "file_type": "text/x-python", "category": "implementation", "start_line": 11, "end_line": 11, "span_ids": [], "tokens": 0}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n", "class_name": "TextNode"}, "__type__": "1"}, "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/dataset.py_logging_create_dataset.new_datalist._": {"__data__": {"id_": "/home/jiayipan/code/24FA/temp/ml-01/moatless-tools/t/repos/swe-train_Project-MONAI__MONAI/monai/apps/deepgrow/dataset.py_logging_create_dataset.new_datalist._", "embedding": null, "metadata": {"file_path": "monai/apps/deepgrow/dataset.py", "file_name": "dataset.py", "file_type": "text/x-python", "category": "implementation", "start_line": 12, "end_line": 86, "span_ids": ["create_dataset", "docstring"], "tokens": 626}, "excluded_embed_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date", "start_line", "end_line", "tokens"], "excluded_llm_metadata_keys": ["file_name", "file_type", "file_size", "creation_date", "last_modified_date", "last_accessed_date"], "relationships": {}, "text": "import logging\nimport os\nfrom typing import Dict, List\n\nimport numpy as np\n\nfrom monai.transforms import AsChannelFirstd, Compose, LoadImaged, Orientationd, Spacingd\nfrom monai.utils import GridSampleMode\n\n\ndef create_dataset(\n datalist,\n output_dir: str,\n dimension: int,\n pixdim,\n image_key: str = \"image\",\n label_key: str = \"label\",\n base_dir=None,\n limit: int = 0,\n relative_path: bool = False,\n transforms=None,\n) -> List[Dict]:\n \"\"\"\n Utility to pre-process and create dataset list for Deepgrow training over on existing one.\n The input data list is normally a list of images and labels (3D volume) that needs pre-processing\n for Deepgrow training pipeline.\n\n Args:\n datalist: A list of data dictionary. Each entry should at least contain 'image_key': .\n For example, typical input data can be a list of dictionaries::\n\n [{'image': , 'label':