Commit 88cc2f3 (1 parent: 72ce58f)
hf_token update
Files changed:
- .idea/.gitignore +3 -0
- .idea/inspectionProfiles/Project_Default.xml +20 -0
- .idea/inspectionProfiles/profiles_settings.xml +6 -0
- .idea/llama2-7b-chat-hf.iml +8 -0
- .idea/modules.xml +8 -0
- .idea/vcs.xml +6 -0
- __pycache__/model.cpython-38.pyc +0 -0
- model.py +2 -1
.idea/.gitignore
ADDED
@@ -0,0 +1,3 @@
+# Default ignored files
+/shelf/
+/workspace.xml
.idea/inspectionProfiles/Project_Default.xml
ADDED
@@ -0,0 +1,20 @@
+<component name="InspectionProjectProfileManager">
+  <profile version="1.0">
+    <option name="myName" value="Project Default" />
+    <inspection_tool class="PyPackageRequirementsInspection" enabled="false" level="WARNING" enabled_by_default="false">
+      <option name="ignoredPackages">
+        <value>
+          <list size="0" />
+        </value>
+      </option>
+    </inspection_tool>
+    <inspection_tool class="PyPep8NamingInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
+      <option name="ignoredErrors">
+        <list>
+          <option value="N806" />
+          <option value="N802" />
+        </list>
+      </option>
+    </inspection_tool>
+  </profile>
+</component>
.idea/inspectionProfiles/profiles_settings.xml
ADDED
@@ -0,0 +1,6 @@
+<component name="InspectionProjectProfileManager">
+  <settings>
+    <option name="USE_PROJECT_PROFILE" value="false" />
+    <version value="1.0" />
+  </settings>
+</component>
.idea/llama2-7b-chat-hf.iml
ADDED
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="PYTHON_MODULE" version="4">
+  <component name="NewModuleRootManager">
+    <content url="file://$MODULE_DIR$" />
+    <orderEntry type="inheritedJdk" />
+    <orderEntry type="sourceFolder" forTests="false" />
+  </component>
+</module>
.idea/modules.xml
ADDED
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="ProjectModuleManager">
+    <modules>
+      <module fileurl="file://$PROJECT_DIR$/.idea/llama2-7b-chat-hf.iml" filepath="$PROJECT_DIR$/.idea/llama2-7b-chat-hf.iml" />
+    </modules>
+  </component>
+</project>
.idea/vcs.xml
ADDED
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="VcsDirectoryMappings">
+    <mapping directory="" vcs="Git" />
+  </component>
+</project>
__pycache__/model.cpython-38.pyc
ADDED
Binary file (2.12 kB)
model.py
CHANGED
@@ -12,7 +12,8 @@ if torch.cuda.is_available():
         model_id,
         torch_dtype=torch.float16,
         device_map='auto',
-        cache_dir='models'
+        cache_dir='models',
+        token="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"
     )
 else:
     model = None
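For context, the hunk above passes a Hugging Face access token straight into the from_pretrained call. Below is a minimal sketch of how that call is commonly written with the token read from an environment variable instead of being hardcoded; the surrounding if-block follows the hunk context, while the model_id value and the HF_TOKEN variable name are assumptions not shown in this diff.

# Sketch of the full call the hunk above modifies (assumptions noted inline).
import os

import torch
from transformers import AutoModelForCausalLM

model_id = "meta-llama/Llama-2-7b-chat-hf"  # assumed from the Space name; not in the diff

if torch.cuda.is_available():
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        torch_dtype=torch.float16,
        device_map="auto",
        cache_dir="models",
        token=os.environ.get("HF_TOKEN"),  # assumed env var; avoids committing the token
    )
else:
    model = None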