# Copyright 2016 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Servlet to import a file of issues in JSON format.
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import

import collections
import json

from features import filterrules_helpers
from framework import framework_helpers
from framework import jsonfeed
from framework import permissions
from framework import servlet
from mrproto import tracker_pb2
from tracker import tracker_bizobj

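# ParserState carries everything parsed from the upload before any DB writes:
# user_id_dict maps email to user ID, nonexist_emails lists accounts that
# must be created, and comments_dict, starrers_dict, and relations_dict are
# keyed by issue local_id.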
ParserState = collections.namedtuple(
    'ParserState',
    'user_id_dict, nonexist_emails, issue_list, comments_dict, starrers_dict, '
    'relations_dict')


class IssueImport(servlet.Servlet):
  """IssueImport loads a file of issues in JSON format."""

  _PAGE_TEMPLATE = 'tracker/issue-import-page.ezt'
  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ISSUES

  def AssertBasePermission(self, mr):
    """Make sure that the logged in user has permission to view this page."""
    super(IssueImport, self).AssertBasePermission(mr)
    if not mr.auth.user_pb.is_site_admin:
      raise permissions.PermissionException(
          'Only site admins may import issues')

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page."""
    return {
        'issue_tab_mode': None,
        'page_perms': self.MakePagePerms(mr, None, permissions.CREATE_ISSUE),
        'import_errors': [],
    }

  def ProcessFormData(self, mr, post_data):
    """Process the issue entry form.

    Args:
      mr: commonly used info parsed from the request.
      post_data: The post_data dict for the current request.

    Returns:
      String URL to redirect the user to after processing.
    """
    import_errors = []
    json_data = None

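    # When pre_check_only is set, run only the parsing phase so the file can
    # be validated without writing anything to the DB.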
    pre_check_only = 'pre_check_only' in post_data

    uploaded_file = post_data.get('jsonfile')
    if uploaded_file is None:
      import_errors.append('No file uploaded')
    else:
      try:
        json_str = uploaded_file.value
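        # Uploaded files may start with the anti-XSSI prefix that jsonfeed
        # prepends to JSON responses; strip it before parsing.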
        if json_str.startswith(jsonfeed.XSSI_PREFIX):
          json_str = json_str[len(jsonfeed.XSSI_PREFIX):]
        json_data = json.loads(json_str)
      except ValueError:
        import_errors.append('error parsing JSON in file')

    if uploaded_file and not json_data:
      import_errors.append('JSON file was empty')

    # Note that the project must already exist in order to even reach
    # this servlet because it is hosted in the context of a project.
    if json_data and mr.project_name != json_data['metadata']['project']:
      import_errors.append(
          'Project name does not match. '
          'Edit the file if you want to import into this project anyway.')

    if import_errors:
      return self.PleaseCorrect(mr, import_errors=import_errors)

    event_log = []  # We accumulate a list of messages to display to the user.

    try:
      # First we parse the JSON into objects, but we don't have DB IDs yet.
      state = self._ParseObjects(mr.cnxn, mr.project_id, json_data, event_log)
      # If that worked, go ahead and start saving the data to the DB.
      if not pre_check_only:
        self._SaveObjects(mr.cnxn, mr.project_id, state, event_log)
    except JSONImportError:
      # Just report it to the user by displaying the event_log.
      event_log.append('Aborted import processing')

    # This is a bit of a hack: it reuses the form-validation error display
    # logic to show the results of this import run, which may or may not
    # include errors.
    return self.PleaseCorrect(mr, import_errors=event_log)

  def _ParseObjects(self, cnxn, project_id, json_data, event_log):
    """Examine JSON data and return a parser state for further processing."""
    # Decide which users need to be created.
    needed_emails = json_data['emails']
    user_id_dict = self.services.user.LookupExistingUserIDs(cnxn, needed_emails)
    nonexist_emails = [email for email in needed_emails
                       if email not in user_id_dict]

    event_log.append('Need to create %d users: %r' %
                     (len(nonexist_emails), nonexist_emails))
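    # Pre-assign provisional IDs for the users we still need to create by
    # hashing the lowercased email, matching how the user service allocates
    # IDs; _SaveObjects() aborts the import if the IDs it gets back disagree.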
    user_id_dict.update({
        email.lower(): framework_helpers.MurmurHash3_x86_32(email.lower())
        for email in nonexist_emails})

    num_comments = 0
    num_stars = 0
    issue_list = []
    comments_dict = collections.defaultdict(list)
    starrers_dict = collections.defaultdict(list)
    relations_dict = collections.defaultdict(list)
    for issue_json in json_data.get('issues', []):
      issue, comment_list, starrer_list, relation_list = self._ParseIssue(
          cnxn, project_id, user_id_dict, issue_json, event_log)
      issue_list.append(issue)
      comments_dict[issue.local_id] = comment_list
      starrers_dict[issue.local_id] = starrer_list
      relations_dict[issue.local_id] = relation_list
      num_comments += len(comment_list)
      num_stars += len(starrer_list)

    event_log.append(
        'Found info for %d issues: %r' %
        (len(issue_list), sorted([issue.local_id for issue in issue_list])))

    event_log.append(
        'Found %d total comments for %d issues' %
        (num_comments, len(comments_dict)))

    event_log.append(
        'Found %d total stars for %d issues' %
        (num_stars, len(starrers_dict)))

    event_log.append(
        'Found %d total relationships.' %
        sum(len(dsts) for dsts in relations_dict.values()))

    event_log.append('Parsing phase finished OK')
    return ParserState(
        user_id_dict, nonexist_emails, issue_list,
        comments_dict, starrers_dict, relations_dict)

  def _ParseIssue(self, cnxn, project_id, user_id_dict, issue_json, event_log):
    """Parse one issue JSON dict into an Issue PB plus related info."""
    issue = tracker_pb2.Issue(
        project_id=project_id,
        local_id=issue_json['local_id'],
        reporter_id=user_id_dict[issue_json['reporter']],
        summary=issue_json['summary'],
        opened_timestamp=issue_json['opened'],
        modified_timestamp=issue_json['modified'],
        cc_ids=[user_id_dict[cc_email]
                for cc_email in issue_json.get('cc', [])
                if cc_email in user_id_dict],
        status=issue_json.get('status', ''),
        labels=issue_json.get('labels', []),
        field_values=[
            self._ParseFieldValue(cnxn, project_id, user_id_dict, field)
            for field in issue_json.get('fields', [])])
    if issue_json.get('owner'):
      issue.owner_id = user_id_dict[issue_json['owner']]
    if issue_json.get('closed'):
      issue.closed_timestamp = issue_json['closed']
    config = self.services.config.GetProjectConfig(cnxn, project_id)
    comments = [self._ParseComment(
                    project_id, user_id_dict, comment_json, event_log, config)
                for comment_json in issue_json.get('comments', [])]

    starrers = [user_id_dict[starrer] for starrer in issue_json['starrers']]

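    # Relations are collected as (local_id, kind) pairs here; they can only
    # be resolved to global issue IDs in _SaveObjects(), after every issue in
    # the file has been inserted.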
    relations = []
    relations.extend(
        [(i, 'blockedon') for i in issue_json.get('blocked_on', [])])
    relations.extend(
        [(i, 'blocking') for i in issue_json.get('blocking', [])])
    if 'merged_into' in issue_json:
      relations.append((issue_json['merged_into'], 'mergedinto'))

    return issue, comments, starrers, relations

  def _ParseFieldValue(self, cnxn, project_id, user_id_dict, field_json):
    """Parse one custom field value JSON dict into a FieldValue PB."""
    field = tracker_pb2.FieldValue(
        field_id=self.services.config.LookupFieldID(cnxn, project_id,
                                                    field_json['field']))
    if 'int_value' in field_json:
      field.int_value = field_json['int_value']
    if 'str_value' in field_json:
      field.str_value = field_json['str_value']
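    # .get() tolerates user emails that were missing from the 'emails' list;
    # such values end up as None.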
    if 'user_value' in field_json:
      field.user_value = user_id_dict.get(field_json['user_value'])

    return field

  def _ParseComment(
      self, project_id, user_id_dict, comment_json, event_log, config):
    """Parse one comment JSON dict into an IssueComment PB."""
    comment = tracker_pb2.IssueComment(
        # Note: issue_id is filled in after the issue is saved.
        project_id=project_id,
        timestamp=comment_json['timestamp'],
        user_id=user_id_dict[comment_json['commenter']],
        content=comment_json.get('content'))

    for amendment in comment_json['amendments']:
      comment.amendments.append(
          self._ParseAmendment(amendment, user_id_dict, event_log, config))

    for attachment in comment_json['attachments']:
      comment.attachments.append(
          self._ParseAttachment(attachment, event_log))

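    # A truthy description_num marks this comment as (a revision of) the
    # issue description rather than an ordinary comment.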
    if comment_json['description_num']:
      comment.is_description = True

    return comment

  def _ParseAmendment(self, amendment_json, user_id_dict, _event_log, config):
    """Parse one amendment JSON dict into an Amendment PB."""
    amendment = tracker_pb2.Amendment(
        field=tracker_pb2.FieldID(amendment_json['field']))

    if 'new_value' in amendment_json:
      amendment.newvalue = amendment_json['new_value']
    if 'custom_field_name' in amendment_json:
      amendment.custom_field_name = amendment_json['custom_field_name']
    if 'added_users' in amendment_json:
      amendment.added_user_ids.extend(
          [user_id_dict[email] for email in amendment_json['added_users']])
    if 'removed_users' in amendment_json:
      amendment.removed_user_ids.extend(
          [user_id_dict[email] for email in amendment_json['removed_users']])
    if 'added_components' in amendment_json:
      for comp in amendment_json['added_components']:
        comp_def = tracker_bizobj.FindComponentDef(comp, config)
        if comp_def:
          # component_id is a scalar, so append it rather than extend.
          amendment.added_component_ids.append(comp_def.component_id)
    if 'removed_components' in amendment_json:
      for comp in amendment_json['removed_components']:
        comp_def = tracker_bizobj.FindComponentDef(comp, config)
        if comp_def:
          amendment.removed_component_ids.append(comp_def.component_id)
    return amendment

  def _ParseAttachment(self, attachment_json, _event_log):
    """Parse one attachment JSON dict into an Attachment PB."""
    attachment = tracker_pb2.Attachment(
        filename=attachment_json['name'],
        filesize=attachment_json['size'],
        mimetype=attachment_json['mimetype'],
        gcs_object_id=attachment_json['gcs_object_id'])
    return attachment

  def _SaveObjects(self, cnxn, project_id, state, event_log):
    """Examine the parsed state and create users, issues, and comments."""

    created_user_ids = self.services.user.LookupUserIDs(
        cnxn, state.nonexist_emails, autocreate=True)
    for created_email, created_id in created_user_ids.items():
      if created_id != state.user_id_dict[created_email]:
        event_log.append('Mismatched user_id for %r' % created_email)
        raise JSONImportError()
    event_log.append('Created %d users' % len(state.nonexist_emails))

    total_comments = 0
    total_stars = 0
    config = self.services.config.GetProjectConfig(cnxn, project_id)
    for issue in state.issue_list:
      # TODO(jrobbins): renumber issues if there is a local_id conflict.
      if issue.local_id not in state.starrers_dict:
        # Issues with stars will have filter rules applied in SetStarsBatch().
        filterrules_helpers.ApplyFilterRules(
            cnxn, self.services, issue, config)
      issue_id = self.services.issue.InsertIssue(cnxn, issue)
      for comment in state.comments_dict[issue.local_id]:
        total_comments += 1
        comment.issue_id = issue_id
        self.services.issue.InsertComment(cnxn, comment)
      self.services.issue_star.SetStarsBatch(
          cnxn, self.services, config, issue_id,
          state.starrers_dict[issue.local_id], True)
      total_stars += len(state.starrers_dict[issue.local_id])

    event_log.append('Created %d issues' % len(state.issue_list))
    event_log.append('Created %d comments for %d issues' % (
        total_comments, len(state.comments_dict)))
    event_log.append('Set %d stars on %d issues' % (
        total_stars, len(state.starrers_dict)))

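    # Now that every issue has a global issue_id, translate the relations,
    # which were parsed as (local_id, kind) pairs, into global IDs.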
    global_relations_dict = collections.defaultdict(list)
    for issue, rels in state.relations_dict.items():
      src_iid = self.services.issue.GetIssueByLocalID(
          cnxn, project_id, issue).issue_id
      dst_iids = [i.issue_id for i in self.services.issue.GetIssuesByLocalIDs(
          cnxn, project_id, [rel[0] for rel in rels])]
      kinds = [rel[1] for rel in rels]
      global_relations_dict[src_iid] = list(zip(dst_iids, kinds))
    self.services.issue.RelateIssues(cnxn, global_relations_dict)

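    # Advance the project's local ID counter so that issues created after the
    # import do not collide with imported local IDs.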
    self.services.issue.SetUsedLocalID(cnxn, project_id)
    event_log.append('Finished import')

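  # GET and POST entry points; the URL routing layer dispatches both to the
  # shared servlet handler.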
  def GetIssueImport(self, **kwargs):
    return self.handler(**kwargs)

  def PostIssueImport(self, **kwargs):
    return self.handler(**kwargs)


class JSONImportError(Exception):
  """Exception to raise if imported JSON is invalid."""