@@ -1,4 +1,4 @@
-from typing import Union, List, Set
+from typing import Union, List, Set, Tuple
 import datetime
 
 from mcim_sync.database.mongodb import raw_mongo_client
@@ -27,56 +27,98 @@
 MODRINTH_DELAY: Union[float, int] = config.modrinth_delay
 
 
-def check_modrinth_data_updated_and_alive(projects: List[Project]) -> tuple[set[str], set[str]]:
-    project_info = {
-        project.id: {"sync_date": project.updated, "versions": project.versions}
+
+
+def check_modrinth_data_updated_and_alive(
+    projects: List[Project],
+) -> Tuple[Set[str], Set[str]]:
+    local_project_info = {
+        project.id: {
+            "updated": project.updated.replace(tzinfo=None),
+            "versions": project.versions,
+            "game_versions": project.game_versions,
+        }
         for project in projects
     }
-    expired_project_ids: Set[str] = set()
-    db_project_ids = [project.id for project in projects]
-    alive_project_ids = []
-
-    info = fetch_mutil_projects_info(project_ids=db_project_ids)
-
-    if info is not None:
-        with ModelSubmitter() as submitter:
-            for project in info:
-                project_id = project["id"]
-
-                # mark as alive
-                alive_project_ids.append(project_id)
-
-                submitter.add(Project(**project))
-
-                sync_date: datetime.datetime = project_info[project_id][
-                    "sync_date"
-                ].replace(tzinfo=None)
-                project_info[project_id]["source_date"] = project["updated"]
-                updated_date = datetime.datetime.fromisoformat(project["updated"]).replace(
-                    tzinfo=None
+
+    all_project_ids = list(local_project_info.keys())
+    outdated_ids: Set[str] = set()
+    alive_ids: Set[str] = set()
+
+    remote_projects = fetch_mutil_projects_info(project_ids=all_project_ids)
+
+    if remote_projects is None:
+        return set(), set()
+
+    with ModelSubmitter() as submitter:
+        for remote in remote_projects:
+            project_id = remote["id"]
+            alive_ids.add(project_id)
+
+            local = local_project_info[project_id]
+
+            local_updated = local["updated"]
+            remote_updated = datetime.datetime.fromisoformat(remote["updated"]).replace(
+                tzinfo=None
+            )
+
+            local_versions = local["versions"]
+            remote_versions = remote["versions"]
+
+            local_game_versions = local["game_versions"]
+            remote_game_versions = remote["game_versions"]
+
+            if _is_project_updated(
+                local_updated, remote_updated
+            ):  # Check if project is updated
+                outdated_ids.add(project_id)
+                log.debug(f"[{project_id}] Updated: {local_updated} → {remote_updated}")
+            elif _has_versions_changed(
+                local_versions, remote_versions
+            ):  # Check if versions have changed
+                outdated_ids.add(project_id)
+                diff_versions = set(remote_versions) ^ set(local_versions)
+                if diff_versions:
+                    log.debug(
+                        f"[{project_id}] Version {diff_versions} mismatch, needs sync."
+                    )
+            elif _has_game_versions_changed(
+                local_game_versions, remote_game_versions
+            ):  # Check if game versions have changed
+                outdated_ids.add(project_id)
+                diff_game_versions = set(remote_game_versions) ^ set(
+                    local_game_versions
                 )
-                if int(sync_date.timestamp()) == int(updated_date.timestamp()):
-                    if project_info[project_id]["versions"] != project["versions"]:
-                        log.debug(
-                            f"Project {project_id} version count is not completely equal, some version were deleted, sync it!"
-                        )
-                        expired_project_ids.add(project_id)
-                    else:
-                        log.debug(f"Project {project_id} is not updated, pass!")
-                else:
-                    expired_project_ids.add(project_id)
+                if diff_game_versions:
                     log.debug(
-                        f"Project {project_id} is updated {sync_date.isoformat(timespec='seconds')} -> {updated_date.isoformat(timespec='seconds')}!"
+                        f"[{project_id}] Game versions {diff_game_versions} changed, needs sync."
                     )
+            else:
+                log.debug(f"[{project_id}] No change, skipping.")
 
-    # check if project is not alive
-    not_alive_project_ids = set(db_project_ids) - set(alive_project_ids)
+            submitter.add(Project(**remote))
+
+    dead_ids = set(all_project_ids) - alive_ids
+
+    log.debug(f"Outdated projects: {len(outdated_ids)}, Dead projects: {len(dead_ids)}")
+
+    return outdated_ids, dead_ids
+
+
+def _is_project_updated(local: datetime.datetime, remote: datetime.datetime) -> bool:
+    return int(local.timestamp()) != int(remote.timestamp())
 
-    log.debug(
-        f"Expired project ids: {len(expired_project_ids)}, not alive project ids: {len(not_alive_project_ids)}"
-    )
 
-    return expired_project_ids, not_alive_project_ids
+def _has_versions_changed(
+    local_versions: List[str], remote_versions: List[str]
+) -> bool:
+    return local_versions != remote_versions
+
+
+def _has_game_versions_changed(
+    local_game_versions: List[str], remote_game_versions: List[str]
+) -> bool:
+    return local_game_versions != remote_game_versions
 
 
 # check modrinth_project_ids queue
@@ -128,7 +170,9 @@ def check_modrinth_hashes_available():
         chunk = hashes[i : i + MODRINTH_LIMIT_SIZE]
         info = fetch_multi_hashes_info(hashes=chunk, algorithm=algorithm)
         if info is not None:
-            available_project_ids.extend([hash["project_id"] for hash in info.values()])
+            available_project_ids.extend(
+                [hash["project_id"] for hash in info.values()]
+            )
     return list(set(available_project_ids))
 
 
@@ -142,6 +186,7 @@ def check_new_project_ids(project_ids: List[str]) -> List[str]:
     found_project_ids = [project["_id"] for project in find_result]
     return list(set(project_ids) - set(found_project_ids))
 
+
 def check_newest_search_result() -> List[str]:
     """
     Iterate over the newest search results until the first already-cached project_id appears, then return all new project_ids
@@ -156,12 +201,12 @@ def check_newest_search_result() -> List[str]:
             break
 
         temp_project_ids = [project["project_id"] for project in res["hits"]]
-
+
         # Check which projects are already in database
         existing_projects = set(
-            doc["_id"] for doc in raw_mongo_client["modrinth_projects"].find(
-                {"_id": {"$in": temp_project_ids}},
-                {"_id": 1}
+            doc["_id"]
+            for doc in raw_mongo_client["modrinth_projects"].find(
+                {"_id": {"$in": temp_project_ids}}, {"_id": 1}
             )
         )
 
@@ -174,7 +219,7 @@ def check_newest_search_result() -> List[str]:
         # If all projects are new, add them and continue searching
         new_project_ids.extend(temp_project_ids)
         log.debug(f"Found {len(temp_project_ids)} new project IDs at offset {offset}")
-
+
         offset += limit
 
-    return new_project_ids
+    return new_project_ids
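A minimal sketch (not part of the patch) of how the whole-second comparison in `_is_project_updated` behaves. It assumes naive datetimes on both sides, matching the diff above, where the stored `updated` value has its `tzinfo` stripped while building `local_project_info` and the remote ISO-8601 string is parsed and stripped the same way; the sample values below are hypothetical.

```python
import datetime


def _is_project_updated(local: datetime.datetime, remote: datetime.datetime) -> bool:
    # Truncate both sides to whole seconds before comparing, as in the patch above.
    return int(local.timestamp()) != int(remote.timestamp())


# Hypothetical values: the stored datetime is naive, while the remote string
# carries an explicit UTC offset and sub-second precision that the comparison ignores.
local = datetime.datetime(2024, 5, 1, 12, 0, 0)
remote = datetime.datetime.fromisoformat("2024-05-01T12:00:00.500000+00:00").replace(
    tzinfo=None
)

print(_is_project_updated(local, remote))  # False: sub-second drift does not trigger a re-sync
print(_is_project_updated(local, remote + datetime.timedelta(seconds=5)))  # True
```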