# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd

"""Servlet to import a file of issues in JSON format."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import

import collections
import json
import logging
import time

import ezt

from features import filterrules_helpers
from framework import flaskservlet
from framework import framework_helpers
from framework import jsonfeed
from framework import permissions
from framework import servlet
from framework import urls
from proto import tracker_pb2

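# ParserState carries parsed-but-not-yet-saved import data between the
# parsing and saving phases: an email -> user_id mapping, the emails that
# still need accounts, the parsed Issue PBs, and per-local_id dicts of
# comments, starrer user IDs, and (dst_local_id, kind) relation tuples.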
ParserState = collections.namedtuple(
    'ParserState',
    'user_id_dict, nonexist_emails, issue_list, comments_dict, starrers_dict, '
    'relations_dict')


class IssueImport(servlet.Servlet):
  """IssueImport loads a file of issues in JSON format."""

  _PAGE_TEMPLATE = 'tracker/issue-import-page.ezt'
  _MAIN_TAB_MODE = flaskservlet.FlaskServlet.MAIN_TAB_ISSUES

  def AssertBasePermission(self, mr):
    """Make sure that the logged-in user has permission to view this page."""
    super(IssueImport, self).AssertBasePermission(mr)
    if not mr.auth.user_pb.is_site_admin:
      raise permissions.PermissionException(
          'Only site admins may import issues')

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page."""
    return {
        'issue_tab_mode': None,
        'page_perms': self.MakePagePerms(mr, None, permissions.CREATE_ISSUE),
        'import_errors': [],
    }

  def ProcessFormData(self, mr, post_data):
    """Process the issue import form.

    Args:
      mr: commonly used info parsed from the request.
      post_data: The post_data dict for the current request.

    Returns:
      String URL to redirect the user to after processing.
    """
    import_errors = []
    json_data = None

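    # 'pre_check_only' requests a dry run: parse and report, but save nothing.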
    pre_check_only = 'pre_check_only' in post_data

    uploaded_file = post_data.get('jsonfile')
    if uploaded_file is None:
      import_errors.append('No file uploaded')
    else:
      try:
        json_str = uploaded_file.value
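        # Exported files may begin with an anti-XSSI prefix; strip it
        # before parsing.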
        if json_str.startswith(jsonfeed.XSSI_PREFIX):
          json_str = json_str[len(jsonfeed.XSSI_PREFIX):]
        json_data = json.loads(json_str)
      except ValueError:
        import_errors.append('error parsing JSON in file')

    if uploaded_file and not json_data:
      import_errors.append('JSON file was empty')

    # Note that the project must already exist in order to even reach
    # this servlet because it is hosted in the context of a project.
    if json_data and mr.project_name != json_data['metadata']['project']:
      import_errors.append(
          'Project name does not match. '
          'Edit the file if you want to import into this project anyway.')

    if import_errors:
      return self.PleaseCorrect(mr, import_errors=import_errors)

    event_log = []  # We accumulate a list of messages to display to the user.

    try:
      # First we parse the JSON into objects, but we don't have DB IDs yet.
      state = self._ParseObjects(mr.cnxn, mr.project_id, json_data, event_log)
      # If that worked, go ahead and start saving the data to the DB.
      if not pre_check_only:
        self._SaveObjects(mr.cnxn, mr.project_id, state, event_log)
    except JSONImportError:
      # Just report the failure to the user by displaying the event_log.
      event_log.append('Aborted import processing')

    # This is a little bit of a hack because it always uses the form-validation
    # error message display logic to show the results of this import run,
    # which may or may not include errors.
    return self.PleaseCorrect(mr, import_errors=event_log)

  def _ParseObjects(self, cnxn, project_id, json_data, event_log):
    """Examine JSON data and return a parser state for further processing."""
    # Decide which users need to be created.
    needed_emails = json_data['emails']
    user_id_dict = self.services.user.LookupExistingUserIDs(cnxn, needed_emails)
    nonexist_emails = [email for email in needed_emails
                       if email not in user_id_dict]

    event_log.append('Need to create %d users: %r' %
                     (len(nonexist_emails), nonexist_emails))
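    # Predict the user_id each new account will get: user IDs are expected
    # to be a MurmurHash of the lowercased email.  _SaveObjects() verifies
    # these predictions against the IDs actually created.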
    user_id_dict.update({
        email.lower(): framework_helpers.MurmurHash3_x86_32(email.lower())
        for email in nonexist_emails})

    num_comments = 0
    num_stars = 0
    issue_list = []
    comments_dict = collections.defaultdict(list)
    starrers_dict = collections.defaultdict(list)
    relations_dict = collections.defaultdict(list)
    for issue_json in json_data.get('issues', []):
      issue, comment_list, starrer_list, relation_list = self._ParseIssue(
          cnxn, project_id, user_id_dict, issue_json, event_log)
      issue_list.append(issue)
      comments_dict[issue.local_id] = comment_list
      starrers_dict[issue.local_id] = starrer_list
      relations_dict[issue.local_id] = relation_list
      num_comments += len(comment_list)
      num_stars += len(starrer_list)

    event_log.append(
        'Found info for %d issues: %r' %
        (len(issue_list), sorted([issue.local_id for issue in issue_list])))

    event_log.append(
        'Found %d total comments for %d issues' %
        (num_comments, len(comments_dict)))

    event_log.append(
        'Found %d total stars for %d issues' %
        (num_stars, len(starrers_dict)))

    event_log.append(
        'Found %d total relationships.' %
        sum(len(dsts) for dsts in relations_dict.values()))

    event_log.append('Parsing phase finished OK')
    return ParserState(
        user_id_dict, nonexist_emails, issue_list,
        comments_dict, starrers_dict, relations_dict)

  def _ParseIssue(self, cnxn, project_id, user_id_dict, issue_json, event_log):
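    """Parse one issue's JSON dict into an Issue PB, plus lists of comments,
    starrer user IDs, and (local_id, kind) relation tuples.
    """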
    issue = tracker_pb2.Issue(
        project_id=project_id,
        local_id=issue_json['local_id'],
        reporter_id=user_id_dict[issue_json['reporter']],
        summary=issue_json['summary'],
        opened_timestamp=issue_json['opened'],
        modified_timestamp=issue_json['modified'],
        cc_ids=[user_id_dict[cc_email]
                for cc_email in issue_json.get('cc', [])
                if cc_email in user_id_dict],
        status=issue_json.get('status', ''),
        labels=issue_json.get('labels', []),
        field_values=[
            self._ParseFieldValue(cnxn, project_id, user_id_dict, field)
            for field in issue_json.get('fields', [])])
    if issue_json.get('owner'):
      issue.owner_id = user_id_dict[issue_json['owner']]
    if issue_json.get('closed'):
      issue.closed_timestamp = issue_json['closed']
    comments = [
        self._ParseComment(project_id, user_id_dict, comment_json, event_log)
        for comment_json in issue_json.get('comments', [])]

    starrers = [user_id_dict[starrer] for starrer in issue_json['starrers']]

    relations = []
    relations.extend(
        [(i, 'blockedon') for i in issue_json.get('blocked_on', [])])
    relations.extend(
        [(i, 'blocking') for i in issue_json.get('blocking', [])])
    if 'merged_into' in issue_json:
      relations.append((issue_json['merged_into'], 'mergedinto'))

    return issue, comments, starrers, relations

  def _ParseFieldValue(self, cnxn, project_id, user_id_dict, field_json):
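    """Parse a custom field value JSON dict into a FieldValue PB."""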
    field = tracker_pb2.FieldValue(
        field_id=self.services.config.LookupFieldID(
            cnxn, project_id, field_json['field']))
    if 'int_value' in field_json:
      field.int_value = field_json['int_value']
    if 'str_value' in field_json:
      field.str_value = field_json['str_value']
    if 'user_value' in field_json:
      field.user_value = user_id_dict.get(field_json['user_value'])

    return field

  def _ParseComment(self, project_id, user_id_dict, comment_json, event_log):
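    """Parse a comment JSON dict into an IssueComment PB."""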
    comment = tracker_pb2.IssueComment(
        # Note: issue_id is filled in after the issue is saved.
        project_id=project_id,
        timestamp=comment_json['timestamp'],
        user_id=user_id_dict[comment_json['commenter']],
        content=comment_json.get('content'))

    for amendment in comment_json['amendments']:
      comment.amendments.append(
          self._ParseAmendment(amendment, user_id_dict, event_log))

    for attachment in comment_json['attachments']:
      comment.attachments.append(
          self._ParseAttachment(attachment, event_log))

    if comment_json['description_num']:
      comment.is_description = True

    return comment

  def _ParseAmendment(self, amendment_json, user_id_dict, _event_log):
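    """Parse an amendment JSON dict into an Amendment PB."""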
    amendment = tracker_pb2.Amendment(
        field=tracker_pb2.FieldID(amendment_json['field']))

    if 'new_value' in amendment_json:
      amendment.newvalue = amendment_json['new_value']
    if 'custom_field_name' in amendment_json:
      amendment.custom_field_name = amendment_json['custom_field_name']
    if 'added_users' in amendment_json:
      amendment.added_user_ids.extend(
          [user_id_dict[email] for email in amendment_json['added_users']])
    if 'removed_users' in amendment_json:
      amendment.removed_user_ids.extend(
          [user_id_dict[email] for email in amendment_json['removed_users']])

    return amendment

  def _ParseAttachment(self, attachment_json, _event_log):
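    """Parse an attachment JSON dict into an Attachment PB."""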
    attachment = tracker_pb2.Attachment(
        filename=attachment_json['name'],
        filesize=attachment_json['size'],
        mimetype=attachment_json['mimetype'],
        gcs_object_id=attachment_json['gcs_object_id'])
    return attachment

  def _SaveObjects(self, cnxn, project_id, state, event_log):
    """Examine parser state and create users, issues, and comments."""

    created_user_ids = self.services.user.LookupUserIDs(
        cnxn, state.nonexist_emails, autocreate=True)
    for created_email, created_id in created_user_ids.items():
      if created_id != state.user_id_dict[created_email]:
        event_log.append('Mismatched user_id for %r' % created_email)
        raise JSONImportError()
    event_log.append('Created %d users' % len(state.nonexist_emails))

    total_comments = 0
    total_stars = 0
    config = self.services.config.GetProjectConfig(cnxn, project_id)
    for issue in state.issue_list:
      # TODO(jrobbins): renumber issues if there is a local_id conflict.
      if issue.local_id not in state.starrers_dict:
        # Issues with stars will have filter rules applied in SetStar().
        filterrules_helpers.ApplyFilterRules(
            cnxn, self.services, issue, config)
      issue_id = self.services.issue.InsertIssue(cnxn, issue)
      for comment in state.comments_dict[issue.local_id]:
        total_comments += 1
        comment.issue_id = issue_id
        self.services.issue.InsertComment(cnxn, comment)
      self.services.issue_star.SetStarsBatch(
          cnxn, self.services, config, issue_id,
          state.starrers_dict[issue.local_id], True)
      total_stars += len(state.starrers_dict[issue.local_id])

    event_log.append('Created %d issues' % len(state.issue_list))
    event_log.append('Created %d comments for %d issues' % (
        total_comments, len(state.comments_dict)))
    event_log.append('Set %d stars on %d issues' % (
        total_stars, len(state.starrers_dict)))

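    # Now that every issue has a global issue_id, translate the relations,
    # which are keyed by local_id, into issue_id form and save them.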
    global_relations_dict = collections.defaultdict(list)
    for src_local_id, rels in state.relations_dict.items():
      src_iid = self.services.issue.GetIssueByLocalID(
          cnxn, project_id, src_local_id).issue_id
      dst_iids = [i.issue_id for i in self.services.issue.GetIssuesByLocalIDs(
          cnxn, project_id, [rel[0] for rel in rels])]
      kinds = [rel[1] for rel in rels]
      global_relations_dict[src_iid] = list(zip(dst_iids, kinds))
    self.services.issue.RelateIssues(cnxn, global_relations_dict)

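    # Bump the project's local ID counter past the imported issues so that
    # newly entered issues get fresh local IDs.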
    self.services.issue.SetUsedLocalID(cnxn, project_id)
    event_log.append('Finished import')

  # def GetIssueImport(self, **kwargs):
  #   return self.handler(**kwargs)

  # def PostIssueImport(self, **kwargs):
  #   return self.handler(**kwargs)


class JSONImportError(Exception):
  """Exception to raise if imported JSON is invalid."""
  pass