
Source Code for Module coprs.logic.builds_logic

   1  import tempfile 
   2  import shutil 
   3  import json 
   4  import os 
   5  import pprint 
   6  import time 
   7  import requests 
   8   
   9  from sqlalchemy.sql import text 
  10  from sqlalchemy.sql.expression import not_ 
  11  from sqlalchemy.orm import joinedload, selectinload 
  12  from sqlalchemy import func, desc, or_, and_ 
  13  from sqlalchemy.sql import false, true 
  14  from werkzeug.utils import secure_filename 
  15  from sqlalchemy import bindparam, Integer, String 
  16  from sqlalchemy.exc import IntegrityError 
  17   
  18  from copr_common.enums import FailTypeEnum, StatusEnum 
  19  from coprs import app 
  20  from coprs import cache 
  21  from coprs import db 
  22  from coprs import models 
  23  from coprs import helpers 
  24  from coprs.exceptions import ( 
  25      ActionInProgressException, 
  26      BadRequest, 
  27      ConflictingRequest, 
  28      DuplicateException, 
  29      InsufficientRightsException, 
  30      InsufficientStorage, 
  31      MalformedArgumentException, 
  32      UnrepeatableBuildException, 
  33  ) 
  34   
  35  from coprs.logic import coprs_logic 
  36  from coprs.logic import users_logic 
  37  from coprs.logic.actions_logic import ActionsLogic 
  38  from coprs.logic.dist_git_logic import DistGitLogic 
  39  from coprs.models import BuildChroot 
  40  from coprs.logic.coprs_logic import MockChrootsLogic 
  41  from coprs.logic.packages_logic import PackagesLogic 
  42  from coprs.logic.batches_logic import BatchesLogic 
  43   
  44  from .helpers import get_graph_parameters 
  45  log = app.logger 
  46   
  47   
  48  PROCESSING_STATES = [StatusEnum(s) for s in [ 
  49      "running", "pending", "starting", "importing", "waiting", 
  50  ]] 
  51  
  52  
  53  class BuildsLogic(object):
  54      @classmethod
  55      def get(cls, build_id):
  56          return models.Build.query.filter(models.Build.id == build_id)
  57  
  58      @classmethod
  59      def get_build_tasks(cls, status, background=None):
  60          """ Returns tasks with given status. If background is specified then
  61          returns normal jobs (false) or background jobs (true)
  62          """
  63          result = models.BuildChroot.query.join(models.Build)\
  64              .filter(models.BuildChroot.status == status)\
  65              .order_by(models.Build.id.asc())
  66          if background is not None:
  67              result = result.filter(models.Build.is_background == (true() if background else false()))
  68          return result
  69  
  70      @classmethod
  71      def get_srpm_build_tasks(cls, status, background=None):
  72          """ Returns source build tasks with given status. If background is
  73          specified then returns normal jobs (false) or background jobs (true)
  74          """
  75          result = models.Build.query\
  76              .filter(models.Build.source_status == status)\
  77              .order_by(models.Build.id.asc())
  78          if background is not None:
  79              result = result.filter(models.Build.is_background == (true() if background else false()))
  80          return result
  81  
  82      @classmethod
  83      @cache.memoize(timeout=2*60)
  84      def get_recent_task_ids(cls, user=None, limit=100, period_days=2):
  85          query_args = (
  86              models.BuildChroot.build_id,
  87              func.max(models.BuildChroot.ended_on).label('max_ended_on'),
  88              models.Build.submitted_on,
  89          )
  90          group_by_args = (
  91              models.BuildChroot.build_id,
  92              models.Build.submitted_on,
  93          )
  94  
  95  
  96          if user:
  97              query_args += (models.Build.user_id,)
  98              group_by_args += (models.Build.user_id,)
  99  
 100          subquery = (db.session.query(*query_args)
 101                      .join(models.Build)
 102                      .group_by(*group_by_args)
 103                      .having(func.count() == func.count(models.BuildChroot.ended_on))
 104                      .having(models.Build.submitted_on > time.time() - 3600*24*period_days)
 105          )
 106          if user:
 107              subquery = subquery.having(models.Build.user_id == user.id)
 108  
 109          subquery = subquery.order_by(desc('max_ended_on')).limit(limit).subquery()
 110  
 111          query = models.Build.query.join(subquery, subquery.c.build_id == models.Build.id)
 112          return [i.id for i in query.all()]
 113  
 114      @classmethod
 115      def get_recent_tasks(cls, *args, **kwargs):
 116          task_ids = cls.get_recent_task_ids(*args, **kwargs)
 117          query = models.Build.query.filter(models.Build.id.in_(task_ids))
 118          return sorted(query.all(), key=lambda o: task_ids.index(o.id))
 119  
 120      @classmethod
 121      def get_running_tasks_by_time(cls, start, end):
 122          result = models.BuildChroot.query\
 123              .filter(models.BuildChroot.ended_on > start)\
 124              .filter(models.BuildChroot.started_on < end)\
 125              .order_by(models.BuildChroot.started_on.asc())
 126  
 127          return result
 128  
 129      @classmethod
 130      def get_chroot_histogram(cls, start, end):
 131          chroots = []
 132          chroot_query = BuildChroot.query\
 133              .filter(models.BuildChroot.started_on < end)\
 134              .filter(models.BuildChroot.ended_on > start)\
 135              .with_entities(BuildChroot.mock_chroot_id,
 136                             func.count(BuildChroot.mock_chroot_id))\
 137              .group_by(BuildChroot.mock_chroot_id)\
 138              .order_by(BuildChroot.mock_chroot_id)
 139  
 140          for chroot in chroot_query:
 141              chroots.append([chroot[0], chroot[1]])
 142  
 143          mock_chroots = coprs_logic.MockChrootsLogic.get_multiple()
 144          for mock_chroot in mock_chroots:
 145              for l in chroots:
 146                  if l[0] == mock_chroot.id:
 147                      l[0] = mock_chroot.name
 148  
 149          return chroots
 150  
 151      @classmethod
 152      def get_pending_jobs_bucket(cls, start, end):
 153          query = text("""
 154              SELECT COUNT(*) as result
 155              FROM build_chroot JOIN build on build.id = build_chroot.build_id
 156              WHERE
 157                  build.submitted_on < :end
 158              AND (
 159                  build_chroot.started_on > :start
 160                  OR (build_chroot.started_on is NULL AND build_chroot.status = :status)
 161                  -- for currently pending builds we need to filter on status=pending because there might be
 162                  -- failed builds that have started_on=NULL
 163              )
 164              AND NOT build.canceled
 165          """)
 166  
 167          res = db.engine.execute(query, start=start, end=end, status=StatusEnum("pending"))
 168          return res.first().result
 169  
 170      @classmethod
 171      def get_running_jobs_bucket(cls, start, end):
 172          query = text("""
 173              SELECT COUNT(*) as result
 174              FROM build_chroot
 175              WHERE
 176                  started_on < :end
 177              AND (ended_on > :start OR (ended_on is NULL AND status = :status))
 178              -- for currently running builds we need to filter on status=running because there might be failed
 179              -- builds that have ended_on=NULL
 180          """)
 181  
 182          res = db.engine.execute(query, start=start, end=end, status=StatusEnum("running"))
 183          return res.first().result
 184  
 185      @classmethod
 186      def get_cached_graph_data(cls, params):
 187          data = {
 188              "pending": [],
 189              "running": [],
 190          }
 191          result = models.BuildsStatistics.query\
 192              .filter(models.BuildsStatistics.stat_type == params["type"])\
 193              .filter(models.BuildsStatistics.time >= params["start"])\
 194              .filter(models.BuildsStatistics.time <= params["end"])\
 195              .order_by(models.BuildsStatistics.time)
 196  
 197          for row in result:
 198              data["pending"].append(row.pending)
 199              data["running"].append(row.running)
 200  
 201          return data
 202  
 203      @classmethod
 204      def get_task_graph_data(cls, type):
 205          data = [["pending"], ["running"], ["avg running"], ["time"]]
 206          params = get_graph_parameters(type)
 207          cached_data = cls.get_cached_graph_data(params)
 208          data[0].extend(cached_data["pending"])
 209          data[1].extend(cached_data["running"])
 210  
 211          for i in range(len(data[0]) - 1, params["steps"]):
 212              step_start = params["start"] + i * params["step"]
 213              step_end = step_start + params["step"]
 214              pending = cls.get_pending_jobs_bucket(step_start, step_end)
 215              running = cls.get_running_jobs_bucket(step_start, step_end)
 216              data[0].append(pending)
 217              data[1].append(running)
 218              cls.cache_graph_data(type, time=step_start, pending=pending, running=running)
 219  
 220          running_total = 0
 221          for i in range(1, params["steps"] + 1):
 222              running_total += data[1][i]
 223  
 224          data[2].extend([running_total * 1.0 / params["steps"]] * (len(data[0]) - 1))
 225  
 226          for i in range(params["start"], params["end"], params["step"]):
 227              data[3].append(time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(i)))
 228  
 229          return data
 230  
 231      @classmethod
 232      def get_small_graph_data(cls, type):
 233          data = [[""]]
 234          params = get_graph_parameters(type)
 235          cached_data = cls.get_cached_graph_data(params)
 236          data[0].extend(cached_data["running"])
 237  
 238          for i in range(len(data[0]) - 1, params["steps"]):
 239              step_start = params["start"] + i * params["step"]
 240              step_end = step_start + params["step"]
 241              running = cls.get_running_jobs_bucket(step_start, step_end)
 242              data[0].append(running)
 243              cls.cache_graph_data(type, time=step_start, running=running)
 244  
 245          return data
 246  
 247      @classmethod
 248      def cache_graph_data(cls, type, time, pending=0, running=0):
 249          result = models.BuildsStatistics.query\
 250              .filter(models.BuildsStatistics.stat_type == type)\
 251              .filter(models.BuildsStatistics.time == time).first()
 252          if result:
 253              return
 254  
 255          try:
 256              cached_data = models.BuildsStatistics(
 257                  time=time,
 258                  stat_type=type,
 259                  running=running,
 260                  pending=pending
 261              )
 262              db.session.add(cached_data)
 263              db.session.commit()
 264          except IntegrityError:  # other process already calculated the graph data and cached it
 265              db.session.rollback()
 266  
 267      @classmethod
 268      def get_build_importing_queue(cls, background=None):
 269          """
 270          Returns Builds which are waiting to be uploaded to dist git
 271          """
 272          query = (models.Build.query
 273                   .filter(models.Build.canceled == false())
 274                   .filter(models.Build.source_status == StatusEnum("importing"))
 275                   .order_by(models.Build.id.asc()))
 276          if background is not None:
 277              query = query.filter(models.Build.is_background == (true() if background else false()))
 278          return query
 279  
 280      @classmethod
 281      def get_pending_srpm_build_tasks(cls, background=None):
 282          query = (models.Build.query
 283                   .filter(models.Build.canceled == false())
 284                   .filter(models.Build.source_status == StatusEnum("pending"))
 285                   .order_by(models.Build.is_background.asc(), models.Build.id.asc()))
 286          if background is not None:
 287              query = query.filter(models.Build.is_background == (true() if background else false()))
 288          return query
 289  
 290      @classmethod
 291      def get_pending_build_tasks(cls, background=None, for_backend=False):
 292          """
 293          Get list of BuildChroot objects that are to be (re)processed.
 294          """
 295  
 296          todo_states = ["pending"]
 297          if for_backend:
 298              # In case of accident and backend VM reboot, the background build
 299              # workers are abruptly terminated.  We list them here too so backend
 300              # can re-process them after reboot so they don't stay in "running"
 301              # state forever.
 302              todo_states += ["starting", "running"]
 303  
 304          query = (
 305              models.BuildChroot.query
 306              .join(models.Build)
 307              .join(models.CoprDir)
 308              # TODO: BuildChroot objects should be self-standing.  The thing is
 309              # that this is racy -- Package reference provides some build
 310              # configuration which can be changed in the middle of the
 311              # BuildChroot processing.
 312              .join(models.Package, models.Package.id == models.Build.package_id)
 313              .options(joinedload('build').joinedload('copr_dir'),
 314                       joinedload('build').joinedload('package'))
 315              .filter(models.Build.canceled == false())
 316              .filter(models.BuildChroot.status.in_(StatusEnum(x) for x in todo_states))
 317              .order_by(models.Build.is_background.asc(), models.Build.id.asc()))
 318          if background is not None:
 319              query = query.filter(models.Build.is_background == (true() if background else false()))
 320          return query
 321  
 322      @classmethod
 323      def get_build_task(cls, task_id):
 324          try:
 325              build_id, chroot_name = task_id.split("-", 1)
 326          except ValueError:
 327              raise MalformedArgumentException("Invalid task_id {}".format(task_id))
 328  
 329          build_chroot = BuildChrootsLogic.get_by_build_id_and_name(build_id, chroot_name)
 330          return build_chroot.join(models.Build).first()
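
The task_id is expected to be the build id and the chroot name joined by a dash, so splitting on the first dash recovers both parts even though chroot names contain dashes themselves. A plain-Python illustration with a hypothetical id:

    task_id = "123456-fedora-rawhide-x86_64"        # hypothetical task id
    build_id, chroot_name = task_id.split("-", 1)
    assert (build_id, chroot_name) == ("123456", "fedora-rawhide-x86_64")
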
 331  
 332      @classmethod
 333      def get_srpm_build_task(cls, build_id):
 334          return BuildsLogic.get_by_id(build_id).first()
 335  
 336      @classmethod
 337      def get_multiple(cls):
 338          return models.Build.query.order_by(models.Build.id.desc())
 339  
 340      @classmethod
 341      def get_multiple_by_copr(cls, copr):
 342          """ Get collection of builds in copr sorted by build_id descending
 343          """
 344          return cls.get_multiple().filter(models.Build.copr == copr)
 345  
 346      @classmethod
 347      def get_multiple_by_user(cls, user):
 348          """ Get collection of builds in copr sorted by build_id descending
 349          from the copr belonging to `user`
 350          """
 351          return cls.get_multiple().join(models.Build.copr).filter(
 352              models.Copr.user == user)
 353  
 354      @classmethod
 355      def get_copr_builds_list(cls, copr, dirname=None):
 356          query = models.Build.query.filter(models.Build.copr_id == copr.id)
 357          if dirname:
 358              copr_dir = coprs_logic.CoprDirsLogic.get_by_copr(copr, dirname).one()
 359          else:
 360              copr_dir = copr.main_dir
 361          query = query.filter(models.Build.copr_dir_id == copr_dir.id)
 362          query = query.options(selectinload('build_chroots'), selectinload('package'))
 363          return query
 364  
 365      @classmethod
 366      def join_group(cls, query):
 367          return query.join(models.Copr).outerjoin(models.Group)
 368  
 369      @classmethod
 370      def get_multiple_by_name(cls, username, coprname):
 371          query = cls.get_multiple()
 372          return (query.join(models.Build.copr)
 373                  .options(db.contains_eager(models.Build.copr))
 374                  .join(models.Copr.user)
 375                  .filter(models.Copr.name == coprname)
 376                  .filter(models.User.username == username))
 377  
 378      @classmethod
 379      def get_by_ids(cls, ids):
 380          return models.Build.query.filter(models.Build.id.in_(ids))
 381  
 382      @classmethod
 383      def get_by_id(cls, build_id):
 384          return models.Build.query.filter(models.Build.id == build_id)
 385  
 386      @classmethod
 387      def create_new_from_other_build(cls, user, copr, source_build,
 388                                      chroot_names=None, **build_options):
 389          skip_import = False
 390          git_hashes = {}
 391  
 392          if source_build.source_type == helpers.BuildSourceEnum('upload'):
 393              if source_build.repeatable:
 394                  skip_import = True
 395                  for chroot in source_build.build_chroots:
 396                      git_hashes[chroot.name] = chroot.git_hash
 397              else:
 398                  raise UnrepeatableBuildException("Build sources were not fully imported into CoprDistGit.")
 399  
 400          build = cls.create_new(user, copr, source_build.source_type, source_build.source_json, chroot_names,
 401                                 pkgs=source_build.pkgs, git_hashes=git_hashes, skip_import=skip_import,
 402                                 srpm_url=source_build.srpm_url, copr_dirname=source_build.copr_dir.name, **build_options)
 403          build.package_id = source_build.package_id
 404          build.pkg_version = source_build.pkg_version
 405          build.resubmitted_from_id = source_build.id
 406  
 407          return build
 408  
 409      @classmethod
 410      def create_new_from_url(cls, user, copr, url, chroot_names=None,
 411                              copr_dirname=None, **build_options):
 412          """
 413          :type user: models.User
 414          :type copr: models.Copr
 415  
 416          :type chroot_names: List[str]
 417  
 418          :rtype: models.Build
 419          """
 420          source_type = helpers.BuildSourceEnum("link")
 421          source_json = json.dumps({"url": url})
 422          srpm_url = None if url.endswith('.spec') else url
 423          return cls.create_new(user, copr, source_type, source_json, chroot_names,
 424                                pkgs=url, srpm_url=srpm_url, copr_dirname=copr_dirname, **build_options)
 425  
 426      @classmethod
 427      def create_new_from_scm(cls, user, copr, scm_type, clone_url,
 428                              committish='', subdirectory='', spec='', srpm_build_method='rpkg',
 429                              chroot_names=None, copr_dirname=None, **build_options):
 430          """
 431          :type user: models.User
 432          :type copr: models.Copr
 433  
 434          :type chroot_names: List[str]
 435  
 436          :rtype: models.Build
 437          """
 438          source_type = helpers.BuildSourceEnum("scm")
 439          source_json = json.dumps({"type": scm_type,
 440                                    "clone_url": clone_url,
 441                                    "committish": committish,
 442                                    "subdirectory": subdirectory,
 443                                    "spec": spec,
 444                                    "srpm_build_method": srpm_build_method})
 445          return cls.create_new(user, copr, source_type, source_json, chroot_names, copr_dirname=copr_dirname, **build_options)
 446  
 447      @classmethod
 448      def create_new_from_pypi(cls, user, copr, pypi_package_name, pypi_package_version, spec_template,
 449                               python_versions, chroot_names=None, copr_dirname=None, **build_options):
 450          """
 451          :type user: models.User
 452          :type copr: models.Copr
 453          :type pypi_package_name: str
 454          :type pypi_package_version: str
 455          :type python_versions: List[str]
 456  
 457          :type chroot_names: List[str]
 458  
 459          :rtype: models.Build
 460          """
 461          source_type = helpers.BuildSourceEnum("pypi")
 462          source_json = json.dumps({"pypi_package_name": pypi_package_name,
 463                                    "pypi_package_version": pypi_package_version,
 464                                    "spec_template": spec_template,
 465                                    "python_versions": python_versions})
 466          return cls.create_new(user, copr, source_type, source_json, chroot_names, copr_dirname=copr_dirname, **build_options)
 467  
 468      @classmethod
 469      def create_new_from_rubygems(cls, user, copr, gem_name, chroot_names=None,
 470                                   copr_dirname=None, **build_options):
 471          """
 472          :type user: models.User
 473          :type copr: models.Copr
 474          :type gem_name: str
 475          :type chroot_names: List[str]
 476          :rtype: models.Build
 477          """
 478          source_type = helpers.BuildSourceEnum("rubygems")
 479          source_json = json.dumps({"gem_name": gem_name})
 480          return cls.create_new(user, copr, source_type, source_json, chroot_names, copr_dirname=copr_dirname, **build_options)
 481  
 482      @classmethod
 483      def create_new_from_custom(cls, user, copr, script, script_chroot=None, script_builddeps=None,
 484                                 script_resultdir=None, chroot_names=None, copr_dirname=None, **kwargs):
 485          """
 486          :type user: models.User
 487          :type copr: models.Copr
 488          :type script: str
 489          :type script_chroot: str
 490          :type script_builddeps: str
 491          :type script_resultdir: str
 492          :type chroot_names: List[str]
 493          :rtype: models.Build
 494          """
 495          source_type = helpers.BuildSourceEnum("custom")
 496          source_dict = {
 497              'script': script,
 498              'chroot': script_chroot,
 499              'builddeps': script_builddeps,
 500              'resultdir': script_resultdir,
 501          }
 502  
 503          return cls.create_new(user, copr, source_type, json.dumps(source_dict),
 504                                chroot_names, copr_dirname=copr_dirname, **kwargs)
 505  
 506      @classmethod
 507      def create_new_from_distgit(cls, user, copr, package_name,
 508                                  distgit_name=None, distgit_namespace=None,
 509                                  committish=None, chroot_names=None,
 510                                  copr_dirname=None, **build_options):
 511          """ Request build of package from DistGit repository """
 512          source_type = helpers.BuildSourceEnum("distgit")
 513          source_dict = {
 514              "clone_url": DistGitLogic.get_clone_url(distgit_name, package_name,
 515                                                      distgit_namespace),
 516          }
 517          if committish:
 518              source_dict["committish"] = committish
 519  
 520          return cls.create_new(
 521              user, copr, source_type, json.dumps(source_dict), chroot_names,
 522              copr_dirname=copr_dirname, **build_options)
 523  
 524      @classmethod
 525      def create_new_from_upload(cls, user, copr, f_uploader, orig_filename,
 526                                 chroot_names=None, copr_dirname=None, **build_options):
 527          """
 528          :type user: models.User
 529          :type copr: models.Copr
 530          :param f_uploader(file_path): function which stores data at the given `file_path`
 531          :return:
 532          """
 533          tmp = None
 534          try:
 535              tmp = tempfile.mkdtemp(dir=app.config["STORAGE_DIR"])
 536              tmp_name = os.path.basename(tmp)
 537              filename = secure_filename(orig_filename)
 538              file_path = os.path.join(tmp, filename)
 539              f_uploader(file_path)
 540          except OSError as error:
 541              if tmp:
 542                  shutil.rmtree(tmp)
 543              raise InsufficientStorage("Can not create storage directory for uploaded file: {}".format(str(error)))
 544  
 545          # make the pkg public
 546          pkg_url = "{baseurl}/tmp/{tmp_dir}/{filename}".format(
 547              baseurl=app.config["PUBLIC_COPR_BASE_URL"],
 548              tmp_dir=tmp_name,
 549              filename=filename)
 550  
 551          # create json describing the build source
 552          source_type = helpers.BuildSourceEnum("upload")
 553          source_json = json.dumps({"url": pkg_url, "pkg": filename, "tmp": tmp_name})
 554          srpm_url = None if pkg_url.endswith('.spec') else pkg_url
 555  
 556          try:
 557              build = cls.create_new(user, copr, source_type, source_json,
 558                                     chroot_names, pkgs=pkg_url, srpm_url=srpm_url,
 559                                     copr_dirname=copr_dirname, **build_options)
 560          except Exception:
 561              shutil.rmtree(tmp)  # todo: maybe we should delete in some cleanup procedure?
 562              raise
 563  
 564          return build
 565  
 566      @classmethod
 567      def create_new(cls, user, copr, source_type, source_json, chroot_names=None, pkgs="",
 568                     git_hashes=None, skip_import=False, background=False, batch=None,
 569                     srpm_url=None, copr_dirname=None, package=None, **build_options):
 570          """
 571          :type user: models.User
 572          :type copr: models.Copr
 573          :type chroot_names: List[str]
 574          :type source_type: int value from helpers.BuildSourceEnum
 575          :type source_json: str in json format
 576          :type pkgs: str
 577          :type git_hashes: dict
 578          :type skip_import: bool
 579          :type background: bool
 580          :type batch: models.Batch
 581          :rtype: models.Build
 582          """
 583          if not copr.active_copr_chroots:
 584              raise BadRequest("Can't create build - project {} has no active chroots".format(copr.full_name))
 585  
 586          chroots = None
 587          if chroot_names:
 588              chroots = []
 589              for chroot in copr.active_chroots:
 590                  if chroot.name in chroot_names:
 591                      chroots.append(chroot)
 592  
 593          build = cls.add(
 594              user=user,
 595              package=package,
 596              pkgs=pkgs,
 597              copr=copr,
 598              chroots=chroots,
 599              source_type=source_type,
 600              source_json=source_json,
 601              enable_net=build_options.get("enable_net", copr.build_enable_net),
 602              background=background,
 603              git_hashes=git_hashes,
 604              skip_import=skip_import,
 605              batch=batch,
 606              srpm_url=srpm_url,
 607              copr_dirname=copr_dirname,
 608              bootstrap=build_options.get("bootstrap"),
 609              after_build_id=build_options.get("after_build_id"),
 610              with_build_id=build_options.get("with_build_id"),
 611          )
 612  
 613          if "timeout" in build_options:
 614              build.timeout = build_options["timeout"]
 615  
 616          return build
 617  
 618      @classmethod
 619      def _setup_batch(cls, batch, after_build_id, with_build_id, user):
 620          # those three are exclusive!
 621          if sum([bool(x) for x in
 622                  [batch, with_build_id, after_build_id]]) > 1:
 623              raise BadRequest("Multiple build batch specifiers")
 624  
 625          if with_build_id:
 626              batch = BatchesLogic.get_batch_or_create(with_build_id, user, True)
 627  
 628          if after_build_id:
 629              old_batch = BatchesLogic.get_batch_or_create(after_build_id, user)
 630              batch = models.Batch()
 631              batch.blocked_by = old_batch
 632              db.session.add(batch)
 633  
 634          return batch
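
The guard at the top of _setup_batch() just counts how many of the three batch specifiers are truthy and rejects more than one. The rule itself, demonstrated in plain Python with no database involved:

    def at_most_one(*specifiers):
        # Mirrors the exclusivity check in _setup_batch().
        return sum(bool(x) for x in specifiers) <= 1

    assert at_most_one(None, 123, None)           # only with_build_id -> accepted
    assert not at_most_one("batch", 123, None)    # batch + with_build_id -> rejected
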
 635  
 636      @classmethod
 637      def add(cls, user, pkgs, copr, source_type=None, source_json=None,
 638              repos=None, chroots=None, timeout=None, enable_net=True,
 639              git_hashes=None, skip_import=False, background=False, batch=None,
 640              srpm_url=None, copr_dirname=None, bootstrap=None,
 641              package=None, after_build_id=None, with_build_id=None):
 642  
 643          if chroots is None:
 644              chroots = []
 645  
 646          coprs_logic.CoprsLogic.raise_if_unfinished_blocking_action(
 647              copr, "Can't build while there is an operation in progress: {action}")
 648          users_logic.UsersLogic.raise_if_cant_build_in_copr(
 649              user, copr,
 650              "You don't have permissions to build in this copr.")
 651  
 652          batch = cls._setup_batch(batch, after_build_id, with_build_id, user)
 653  
 654          if not repos:
 655              repos = copr.repos
 656  
 657          # todo: eliminate pkgs and this check
 658          if pkgs and (" " in pkgs or "\n" in pkgs or "\t" in pkgs or pkgs.strip() != pkgs):
 659              raise MalformedArgumentException("Trying to create a build using src_pkg "
 660                                               "with bad characters. Forgot to split?")
 661  
 662          # just temporary to keep compatibility
 663          if not source_type or not source_json:
 664              source_type = helpers.BuildSourceEnum("link")
 665              source_json = json.dumps({"url": pkgs})
 666  
 667          if skip_import and srpm_url:
 668              chroot_status = StatusEnum("pending")
 669              source_status = StatusEnum("succeeded")
 670          else:
 671              chroot_status = StatusEnum("waiting")
 672              source_status = StatusEnum("pending")
 673  
 674          copr_dir = None
 675          if copr_dirname:
 676              if not copr_dirname.startswith(copr.name+':') and copr_dirname != copr.name:
 677                  raise MalformedArgumentException("Copr dirname not starting with copr name.")
 678              copr_dir = coprs_logic.CoprDirsLogic.get_or_create(copr, copr_dirname)
 679  
 680          build = models.Build(
 681              user=user,
 682              package=package,
 683              pkgs=pkgs,
 684              copr=copr,
 685              repos=repos,
 686              source_type=source_type,
 687              source_json=source_json,
 688              source_status=source_status,
 689              submitted_on=int(time.time()),
 690              enable_net=bool(enable_net),
 691              is_background=bool(background),
 692              batch=batch,
 693              srpm_url=srpm_url,
 694              copr_dir=copr_dir,
 695              bootstrap=bootstrap,
 696          )
 697  
 698          if timeout:
 699              build.timeout = timeout or app.config["DEFAULT_BUILD_TIMEOUT"]
 700  
 701          db.session.add(build)
 702  
 703          for chroot in chroots:
 704              # Chroots were explicitly set per-build.
 705              git_hash = None
 706              if git_hashes:
 707                  git_hash = git_hashes.get(chroot.name)
 708              buildchroot = BuildChrootsLogic.new(
 709                  build=build,
 710                  status=chroot_status,
 711                  mock_chroot=chroot,
 712                  git_hash=git_hash,
 713              )
 714              db.session.add(buildchroot)
 715  
 716          return build
 717  
 718      @classmethod
 719      def rebuild_package(cls, package, source_dict_update={}, copr_dir=None, update_callback=None,
 720                          scm_object_type=None, scm_object_id=None,
 721                          scm_object_url=None, submitted_by=None):
 722          """
 723          Rebuild a concrete package by a webhook.  This is different from
 724          create_new() because we don't have a concrete 'user' who submits this
 725          (only a submitted_by string).
 726          """
 727  
 728          source_dict = package.source_json_dict
 729          source_dict.update(source_dict_update)
 730          source_json = json.dumps(source_dict)
 731  
 732          if not copr_dir:
 733              copr_dir = package.copr.main_dir
 734  
 735          build = models.Build(
 736              user=None,
 737              pkgs=None,
 738              package=package,
 739              copr=package.copr,
 740              repos=package.copr.repos,
 741              source_status=StatusEnum("pending"),
 742              source_type=package.source_type,
 743              source_json=source_json,
 744              submitted_on=int(time.time()),
 745              enable_net=package.copr.build_enable_net,
 746              timeout=app.config["DEFAULT_BUILD_TIMEOUT"],
 747              copr_dir=copr_dir,
 748              update_callback=update_callback,
 749              scm_object_type=scm_object_type,
 750              scm_object_id=scm_object_id,
 751              scm_object_url=scm_object_url,
 752              submitted_by=submitted_by,
 753          )
 754          db.session.add(build)
 755  
 756          status = StatusEnum("waiting")
 757          for chroot in package.chroots:
 758              buildchroot = BuildChrootsLogic.new(
 759                  build=build,
 760                  status=status,
 761                  mock_chroot=chroot,
 762                  git_hash=None
 763              )
 764              db.session.add(buildchroot)
 765  
 766          cls.process_update_callback(build)
 767          return build
 768  
 769  
 770      terminal_states = {StatusEnum("failed"), StatusEnum("succeeded"), StatusEnum("canceled")}
 771  
 772      @classmethod
 773      def get_buildchroots_by_build_id_and_branch(cls, build_id, branch):
 774          """
 775          Returns a list of BuildChroots identified by build_id and dist-git
 776          branch name.
 777          """
 778          return (
 779              models.BuildChroot.query
 780              .join(models.MockChroot)
 781              .filter(models.BuildChroot.build_id == build_id)
 782              .filter(models.MockChroot.distgit_branch_name == branch)
 783          ).all()
 784  
 785  
 786      @classmethod
 787      def delete_local_source(cls, build):
 788          """
 789          Deletes the locally stored data used for the build.  This is typically
 790          an uploaded SRPM file, an uploaded spec file, or webhook POST content.
 791          """
 792          # is it hosted on the copr frontend?
 793          data = json.loads(build.source_json)
 794          if 'tmp' in data:
 795              tmp = data["tmp"]
 796              storage_path = app.config["STORAGE_DIR"]
 797              try:
 798                  shutil.rmtree(os.path.join(storage_path, tmp))
 799              except:
 800                  pass
 801  
 802  
 803      @classmethod
 804      def update_state_from_dict(cls, build, upd_dict):
 805          """
 806          :param build:
 807          :param upd_dict:
 808              example:
 809              {
 810                "builds": [
 811                  {
 812                    "id": 1,
 813                    "copr_id": 2,
 814                    "started_on": 1390866440
 815                  },
 816                  {
 817                    "id": 2,
 818                    "copr_id": 1,
 819                    "status": 0,
 820                    "chroot": "fedora-18-x86_64",
 821                    "result_dir": "baz",
 822                    "ended_on": 1390866440
 823                  }]
 824              }
 825          """
 826          log.info("Updating build {} by: {}".format(build.id, upd_dict))
 827  
 828          pkg_name = upd_dict.get('pkg_name', None)
 829          if not build.package and pkg_name:
 830              # assign the package if it isn't already
 831              if not PackagesLogic.get(build.copr_dir.id, pkg_name).first():
 832                  # create the package if it doesn't exist
 833                  try:
 834                      package = PackagesLogic.add(
 835                          build.copr.user, build.copr_dir,
 836                          pkg_name, build.source_type, build.source_json)
 837                      db.session.add(package)
 838                      db.session.commit()
 839                  except (IntegrityError, DuplicateException) as e:
 840                      app.logger.exception(e)
 841                      db.session.rollback()
 842                      return
 843              build.package = PackagesLogic.get(build.copr_dir.id, pkg_name).first()
 844  
 845          for attr in ["built_packages", "srpm_url", "pkg_version"]:
 846              value = upd_dict.get(attr, None)
 847              if value:
 848                  setattr(build, attr, value)
 849  
 850          # update source build status
 851          if str(upd_dict.get("task_id")) == str(build.task_id):
 852              build.result_dir = upd_dict.get("result_dir", "")
 853  
 854              new_status = upd_dict.get("status")
 855              if new_status == StatusEnum("succeeded"):
 856                  new_status = StatusEnum("importing")
 857                  chroot_status = StatusEnum("waiting")
 858                  if not build.build_chroots:
 859                      # create the BuildChroots from Package setting, if not
 860                      # already set explicitly for concrete build
 861                      for chroot in build.package.chroots:
 862                          buildchroot = BuildChrootsLogic.new(
 863                              build=build,
 864                              status=chroot_status,
 865                              mock_chroot=chroot,
 866                              git_hash=None,
 867                          )
 868                          db.session.add(buildchroot)
 869                  else:
 870                      for buildchroot in build.build_chroots:
 871                          buildchroot.status = chroot_status
 872                          db.session.add(buildchroot)
 873  
 874              build.source_status = new_status
 875              if new_status == StatusEnum("failed") or \
 876                     new_status == StatusEnum("skipped"):
 877                  for ch in build.build_chroots:
 878                      ch.status = new_status
 879                      ch.ended_on = upd_dict.get("ended_on") or time.time()
 880                      ch.started_on = upd_dict.get("started_on", ch.ended_on)
 881                      db.session.add(ch)
 882  
 883              if new_status == StatusEnum("failed"):
 884                  build.fail_type = FailTypeEnum("srpm_build_error")
 885  
 886              cls.process_update_callback(build)
 887              db.session.add(build)
 888              return
 889  
 890          if "chroot" in upd_dict:
 891              # update respective chroot status
 892              for build_chroot in build.build_chroots:
 893                  if build_chroot.name == upd_dict["chroot"]:
 894                      build_chroot.result_dir = upd_dict.get("result_dir", "")
 895  
 896                      if "status" in upd_dict and build_chroot.status not in BuildsLogic.terminal_states:
 897                          build_chroot.status = upd_dict["status"]
 898  
 899                      if upd_dict.get("status") in BuildsLogic.terminal_states:
 900                          build_chroot.ended_on = upd_dict.get("ended_on") or time.time()
 901  
 902                      if upd_dict.get("status") == StatusEnum("starting"):
 903                          build_chroot.started_on = upd_dict.get("started_on") or time.time()
 904  
 905                      db.session.add(build_chroot)
 906  
 907                      # If the last package of a module was successfully built,
 908                      # then send an action to create module repodata on backend
 909                      if (build.module
 910                              and upd_dict.get("status") == StatusEnum("succeeded")
 911                              and all(b.status == StatusEnum("succeeded") for b in build.module.builds)):
 912                          ActionsLogic.send_build_module(build.copr, build.module)
 913  
 914          cls.process_update_callback(build)
 915          db.session.add(build)
 916  
 917      @classmethod
 918      def process_update_callback(cls, build):
 919          parsed_git_url = helpers.get_parsed_git_url(build.copr.scm_repo_url)
 920          if not parsed_git_url:
 921              return
 922  
 923          if build.update_callback == 'pagure_flag_pull_request':
 924              api_url = 'https://{0}/api/0/{1}/pull-request/{2}/flag'.format(
 925                  parsed_git_url.netloc, parsed_git_url.path, build.scm_object_id)
 926              return cls.pagure_flag(build, api_url)
 927  
 928          elif build.update_callback == 'pagure_flag_commit':
 929              api_url = 'https://{0}/api/0/{1}/c/{2}/flag'.format(
 930                  parsed_git_url.netloc, parsed_git_url.path, build.scm_object_id)
 931              return cls.pagure_flag(build, api_url)
 932  
 933      @classmethod
 934      def pagure_flag(cls, build, api_url):
 935          headers = {
 936              'Authorization': 'token {}'.format(build.copr.scm_api_auth.get('api_key'))
 937          }
 938  
 939          if build.srpm_url:
 940              progress = 50
 941          else:
 942              progress = 10
 943  
 944          state_table = {
 945              'failed': ('failure', 0),
 946              'succeeded': ('success', 100),
 947              'canceled': ('canceled', 0),
 948              'running': ('pending', progress),
 949              'pending': ('pending', progress),
 950              'skipped': ('error', 0),
 951              'starting': ('pending', progress),
 952              'importing': ('pending', progress),
 953              'forked': ('error', 0),
 954              'waiting': ('pending', progress),
 955              'unknown': ('error', 0),
 956          }
 957  
 958          build_url = os.path.join(
 959              app.config['PUBLIC_COPR_BASE_URL'],
 960              'coprs', build.copr.full_name.replace('@', 'g/'),
 961              'build', str(build.id)
 962          )
 963  
 964          data = {
 965              'username': 'Copr build',
 966              'comment': '#{}'.format(build.id),
 967              'url': build_url,
 968              'status': state_table[build.state][0],
 969              'percent': state_table[build.state][1],
 970              'uid': str(build.id),
 971          }
 972  
 973          log.debug('Sending data to Pagure API: %s', pprint.pformat(data))
 974          response = requests.post(api_url, data=data, headers=headers)
 975          log.debug('Pagure API response: %s', response.text)
 976  
 977      @classmethod
 978      def cancel_build(cls, user, build):
 979          if not user.can_build_in(build.copr):
 980              raise InsufficientRightsException(
 981                  "You are not allowed to cancel this build.")
 982          if not build.cancelable:
 983              err_msg = "Cannot cancel build {}".format(build.id)
 984              raise ConflictingRequest(err_msg)
 985  
 986          # No matter the state, we tell backend to cancel this build.  Even when
 987          # the build is in pending state (worker manager may be already handling
 988          # this build ATM, and creating an asynchronous worker which needs to be
 989          # canceled).
 990          ActionsLogic.send_cancel_build(build)
 991  
 992          build.canceled = True
 993          cls.process_update_callback(build)
 994  
 995  
 996      @classmethod
 997      def check_build_to_delete(cls, user, build):
 998          """
 999          :type user: models.User
1000          :type build: models.Build
1001          """
1002          if not user.can_edit(build.copr) or build.persistent:
1003              raise InsufficientRightsException(
1004                  "You are not allowed to delete build `{}`.".format(build.id))
1005  
1006          if not build.finished:
1007              raise ActionInProgressException(
1008                  "You can not delete build `{}` which is not finished.".format(build.id),
1009                  "Unfinished build")
1010  
1011      @classmethod
1012      def delete_build(cls, user, build, send_delete_action=True):
1013          """
1014          :type user: models.User
1015          :type build: models.Build
1016          """
1017          cls.check_build_to_delete(user, build)
1018  
1019          if send_delete_action:
1020              ActionsLogic.send_delete_build(build)
1021  
1022          db.session.delete(build)
1023  
1024      @classmethod
1025      def delete_builds(cls, user, build_ids):
1026          """
1027          Delete builds specified by list of IDs
1028  
1029          :type user: models.User
1030          :type build_ids: list of Int
1031          """
1032          to_delete = []
1033          no_permission = []
1034          still_running = []
1035  
1036          build_ids = set(build_ids)
1037          builds = cls.get_by_ids(build_ids)
1038          for build in builds:
1039              try:
1040                  cls.check_build_to_delete(user, build)
1041                  to_delete.append(build)
1042              except InsufficientRightsException:
1043                  no_permission.append(build.id)
1044              except ActionInProgressException:
1045                  still_running.append(build.id)
1046              finally:
1047                  build_ids.remove(build.id)
1048  
1049          if build_ids or no_permission or still_running:
1050              msg = ""
1051              if no_permission:
1052                  msg += "You don't have permissions to delete build(s) {0}.\n"\
1053                      .format(", ".join(map(str, no_permission)))
1054              if still_running:
1055                  msg += "Build(s) {0} are still running.\n"\
1056                      .format(", ".join(map(str, still_running)))
1057              if build_ids:
1058                  msg += "Build(s) {0} don't exist.\n"\
1059                      .format(", ".join(map(str, build_ids)))
1060  
1061              raise BadRequest(msg)
1062  
1063          if to_delete:
1064              ActionsLogic.send_delete_multiple_builds(to_delete)
1065  
1066          for build in to_delete:
1067              for build_chroot in build.build_chroots:
1068                  db.session.delete(build_chroot)
1069  
1070              db.session.delete(build)
1071  
1072      @classmethod
1073      def mark_as_failed(cls, build_id):
1074          """
1075          Marks build as failed on all its non-finished chroots
1076          """
1077          build = cls.get(build_id).one()
1078          chroots = filter(lambda x: x.status != StatusEnum("succeeded"), build.build_chroots)
1079          for chroot in chroots:
1080              chroot.status = StatusEnum("failed")
1081          if build.source_status != StatusEnum("succeeded"):
1082              build.source_status = StatusEnum("failed")
1083          cls.process_update_callback(build)
1084          return build
1085  
1086      @classmethod
1087      def last_modified(cls, copr):
1088          """ Get build datetime (as epoch) of last successful build
1089  
1090          :arg copr: object of copr
1091          """
1092          builds = cls.get_multiple_by_copr(copr)
1093  
1094          last_build = (
1095              builds.join(models.BuildChroot)
1096              .filter((models.BuildChroot.status == StatusEnum("succeeded"))
1097                      | (models.BuildChroot.status == StatusEnum("skipped")))
1098              .filter(models.BuildChroot.ended_on.isnot(None))
1099              .order_by(models.BuildChroot.ended_on.desc())
1100          ).first()
1101          if last_build:
1102              return last_build.ended_on
1103          else:
1104              return None
1105  
1106      @classmethod
1107      def filter_is_finished(cls, query, is_finished):
1108          # todo: check that ended_on is set correctly for all cases
1109          # e.g.: failed dist-git import, cancellation
1110          if is_finished:
1111              return query.join(models.BuildChroot).filter(models.BuildChroot.ended_on.isnot(None))
1112          else:
1113              return query.join(models.BuildChroot).filter(models.BuildChroot.ended_on.is_(None))
1114  
1115      @classmethod
1116      def filter_by_group_name(cls, query, group_name):
1117          return query.filter(models.Group.name == group_name)
1118  
1119      @classmethod
1120      def filter_by_package_name(cls, query, package_name):
1121          return query.join(models.Package).filter(models.Package.name == package_name)
1122  
1123      @classmethod
1124      def clean_old_builds(cls):
1125          dirs = (
1126              db.session.query(
1127                  models.CoprDir.id,
1128                  models.Package.id,
1129                  models.Package.max_builds)
1130              .join(models.Build, models.Build.copr_dir_id == models.CoprDir.id)
1131              .join(models.Package)
1132              .filter(models.Package.max_builds > 0)
1133              .group_by(
1134                  models.CoprDir.id,
1135                  models.Package.max_builds,
1136                  models.Package.id)
1137              .having(func.count(models.Build.id) > models.Package.max_builds)
1138          )
1139  
1140          for dir_id, package_id, limit in dirs.all():
1141              delete_builds = (
1142                  models.Build.query.filter(
1143                      models.Build.copr_dir_id == dir_id,
1144                      models.Build.package_id == package_id)
1145                  .order_by(desc(models.Build.id))
1146                  .offset(limit)
1147                  .all()
1148              )
1149  
1150              for build in delete_builds:
1151                  try:
1152                      cls.delete_build(build.copr.user, build)
1153                  except ActionInProgressException:
1154                      # postpone this one to next day run
1155                      log.error("Build(id={}) delete failed, unfinished action.".format(build.id))
1156  
1157      @classmethod
1158      def delete_orphaned_builds(cls):
1159          builds_to_delete = models.Build.query\
1160              .join(models.Copr, models.Build.copr_id == models.Copr.id)\
1161              .filter(models.Copr.deleted == True)
1162  
1163          counter = 0
1164          for build in builds_to_delete:
1165              cls.delete_build(build.copr.user, build)
1166              counter += 1
1167              if counter >= 100:
1168                  db.session.commit()
1169                  counter = 0
1170  
1171          if counter > 0:
1172              db.session.commit()
1173  
1174      @classmethod
1175      def processing_builds(cls):
1176          """
1177          Query for all builds which are not yet finished, i.e. all builds that
1178          have a non-finished source status or at least one non-finished build
1179          chroot.
1180          """
1181          build_ids_with_bch = db.session.query(BuildChroot.build_id).filter(
1182              BuildChroot.status.in_(PROCESSING_STATES),
1183          )
1184          # skip waiting state, we need to fix issue #1539
1185          source_states = set(PROCESSING_STATES) - {StatusEnum("waiting")}
1186          return models.Build.query.filter(and_(
1187              not_(models.Build.canceled),
1188              or_(
1189                  models.Build.id.in_(build_ids_with_bch),
1190                  models.Build.source_status.in_(source_states),
1191              ),
1192          ))
1193  
1194  
1195  class BuildChrootsLogic(object):
1196      @classmethod
1197      def new(cls, build, mock_chroot, **kwargs):
1198          """
1199          Create new instance of BuildChroot
1200          (which is not assigned to any session)
1201  
1202          Each freshly created instance of BuildChroot has to be assigned to
1203          pre-existing Build and MockChroot, hence the mandatory arguments.
1204          """
1205          copr_chroot = coprs_logic.CoprChrootsLogic.get_by_mock_chroot_id(
1206              build.copr, mock_chroot.id
1207          ).one()
1208          return models.BuildChroot(
1209              mock_chroot=mock_chroot,
1210              copr_chroot=copr_chroot,
1211              build=build,
1212              **kwargs,
1213          )
1214  
1215      @classmethod
1216      def get_by_build_id_and_name(cls, build_id, name):
1217          mc = MockChrootsLogic.get_from_name(name).one()
1218  
1219          return (
1220              BuildChroot.query
1221              .filter(BuildChroot.build_id == build_id)
1222              .filter(BuildChroot.mock_chroot_id == mc.id)
1223          )
1224  
1225      @classmethod
1226      def get_multiply(cls):
1227          query = (
1228              models.BuildChroot.query
1229              .join(models.BuildChroot.build)
1230              .join(models.BuildChroot.mock_chroot)
1231              .join(models.Build.copr)
1232              .join(models.Copr.user)
1233              .outerjoin(models.Group)
1234          )
1235          return query
1236  
1237      @classmethod
1238      def filter_by_build_id(cls, query, build_id):
1239          return query.filter(models.Build.id == build_id)
1240  
1241      @classmethod
1242      def filter_by_project_id(cls, query, project_id):
1243          return query.filter(models.Copr.id == project_id)
1244  
1245      @classmethod
1246      def filter_by_project_user_name(cls, query, username):
1247          return query.filter(models.User.username == username)
1248  
1249      @classmethod
1250      def filter_by_state(cls, query, state):
1251          return query.filter(models.BuildChroot.status == StatusEnum(state))
1252  
1253      @classmethod
1254      def filter_by_group_name(cls, query, group_name):
1255          return query.filter(models.Group.name == group_name)
1256  
1257      @classmethod
1258      def filter_by_copr_and_mock_chroot(cls, query, copr, mock_chroot):
1259          """
1260          Filter BuildChroot query so it returns only instances related to
1261          particular Copr and MockChroot.
1262          """
1263          return (
1264              query.join(models.BuildChroot.build)
1265              .filter(models.BuildChroot.mock_chroot_id == mock_chroot.id)
1266              .filter(models.Build.copr_id == copr.id)
1267          )
1268  
1269      @classmethod
1270      def by_copr_and_mock_chroot(cls, copr, mock_chroot):
1271          """
1272          Given Copr and MockChroot instances, return query object which provides
1273          a list of related BuildChroots.
1274          """
1275          return cls.filter_by_copr_and_mock_chroot(BuildChroot.query, copr,
1276                                                    mock_chroot)
1277  
1278  
1279  class BuildsMonitorLogic(object):
1280      @classmethod
1281      def get_monitor_data(cls, copr):
1282          query = """
1283          SELECT
1284            package.id AS package_id,
1285            package.name AS package_name,
1286            build.id AS build_id,
1287            build_chroot.status AS build_chroot_status,
1288            build.pkg_version AS build_pkg_version,
1289            mock_chroot.id AS mock_chroot_id,
1290            mock_chroot.os_release AS mock_chroot_os_release,
1291            mock_chroot.os_version AS mock_chroot_os_version,
1292            mock_chroot.arch AS mock_chroot_arch
1293          FROM package
1294          JOIN (SELECT
1295                  MAX(build.id) AS max_build_id_for_chroot,
1296                  build.package_id AS package_id,
1297                  build_chroot.mock_chroot_id AS mock_chroot_id
1298                FROM build
1299                JOIN build_chroot
1300                  ON build.id = build_chroot.build_id
1301                WHERE build.copr_id = {copr_id}
1302                  AND build_chroot.status != 2
1303                GROUP BY build.package_id,
1304                         build_chroot.mock_chroot_id) AS max_build_ids_for_a_chroot
1305            ON package.id = max_build_ids_for_a_chroot.package_id
1306          JOIN build
1307            ON build.id = max_build_ids_for_a_chroot.max_build_id_for_chroot
1308          JOIN build_chroot
1309            ON build_chroot.mock_chroot_id = max_build_ids_for_a_chroot.mock_chroot_id
1310            AND build_chroot.build_id = max_build_ids_for_a_chroot.max_build_id_for_chroot
1311          JOIN mock_chroot
1312            ON mock_chroot.id = max_build_ids_for_a_chroot.mock_chroot_id
1313          JOIN copr_dir ON build.copr_dir_id = copr_dir.id WHERE copr_dir.main IS TRUE
1314          ORDER BY package.name ASC, package.id ASC, mock_chroot.os_release ASC, mock_chroot.os_version ASC, mock_chroot.arch ASC
1315          """.format(copr_id=copr.id)
1316          rows = db.session.execute(query)
1317          return rows
1318  
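
Finally, a sketch of consuming the monitor rows; each row exposes the column aliases from the SQL above (the newest relevant build per package/chroot pair in the project's main directory), and attribute-style access on SQLAlchemy result rows is assumed:

    # Illustrative sketch only -- `copr` is a hypothetical models.Copr instance.
    from coprs.logic.builds_logic import BuildsMonitorLogic

    for row in BuildsMonitorLogic.get_monitor_data(copr):
        chroot = "{}-{}-{}".format(row.mock_chroot_os_release,
                                   row.mock_chroot_os_version,
                                   row.mock_chroot_arch)
        print(row.package_name, chroot, row.build_chroot_status)
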