1515from collections import defaultdict
1616from datetime import datetime , timedelta
1717import fnmatch
18+ import hashlib
1819from hashlib import sha256
1920import json
2021import os
2122from pathlib import Path
2223import sqlalchemy
24+ from sqlalchemy .orm import Session as SA_Session
2325import tempfile
2426import time
2527from typing import Any , Callable , Dict , List , Optional , Set , Tuple , Union , \
4648from ..database .config_db_model import Product
4749from ..database .database import DBSession
4850from ..database .run_db_model import \
49- AnalysisInfo , AnalysisInfoChecker , AnalyzerStatistic , \
51+ AnalysisInfo , AnalysisInfoChecker , AnalysisInfoFile , AnalyzerStatistic , \
5052 BugPathEvent , BugReportPoint , \
5153 Checker , \
5254 ExtendedReportData , \
@@ -814,8 +816,8 @@ def __add_file_content(
814816 self ,
815817 session : DBSession ,
816818 source_file_name : str ,
817- content_hash : Optional [str ]
818- ):
819+ content_hash : Optional [str ] = None
820+ ) -> str :
819821 """
820822 Add the necessary file contents. If content_hash in None then this
821823 function calculates the content hash. Or if it's available at the
@@ -871,6 +873,8 @@ def __add_file_content(
871873 # the meantime.
872874 session .rollback ()
873875
876+ return content_hash
877+
874878 def __store_checker_identifiers (self , checkers : Set [Tuple [str , str ]]):
875879 """
876880 Stores the identifiers "(analyzer, checker_name)" in the database into
@@ -1000,6 +1004,28 @@ def __store_analysis_statistics(
10001004
10011005 session .add (analyzer_statistics )
10021006
def __store_analysis_info_files(
    self,
    session: SA_Session,
    analysis_info_id: int,
    report_dir_path: str
):
    """
    Store analyzer-related configuration files (e.g. the skip file)
    that belong to the given analysis-info record.

    The uploaded ZIP keeps each report directory under
    ``<zip_dir>/reports/<md5(report_dir_path)>`` — MD5 is used only as a
    directory-name key here, not for security.  If a ``skip_file`` exists
    in that directory, its content is persisted via
    ``__add_file_content`` and linked to ``analysis_info_id`` through an
    ``AnalysisInfoFile`` row, unless an identical link already exists.
    """
    report_dir_digest = hashlib.md5(
        report_dir_path.encode('utf-8')).hexdigest()
    zip_report_dir = os.path.join(
        self._zip_dir, "reports", report_dir_digest)

    skip_file_path = os.path.join(zip_report_dir, 'skip_file')
    if not os.path.isfile(skip_file_path):
        return

    # Store (or look up) the file content; this yields its content hash.
    content_hash = self.__add_file_content(session, skip_file_path)

    # Only add the link row if this (analysis_info, content) pair is new.
    existing = session.get(
        AnalysisInfoFile, (analysis_info_id, content_hash))
    if not existing:
        session.add(AnalysisInfoFile(
            analysis_info_id=analysis_info_id,
            filename="skip_file",
            content_hash=content_hash))
10031029 def __store_analysis_info (
10041030 self ,
10051031 session : DBSession ,
@@ -1012,37 +1038,30 @@ def __store_analysis_info(
10121038 analyzer_command .encode ("utf-8" ),
10131039 zlib .Z_BEST_COMPRESSION )
10141040
1015- analysis_info_rows = session \
1016- .query (AnalysisInfo ) \
1017- .filter (AnalysisInfo .analyzer_command == cmd ) \
1018- .all ()
1019-
1020- if analysis_info_rows :
1021- # It is possible when multiple runs are stored
1022- # simultaneously to the server with the same analysis
1023- # command that multiple entries are stored into the
1024- # database. In this case we will select the first one.
1025- analysis_info = analysis_info_rows [0 ]
1026- else :
1027- analysis_info = AnalysisInfo (analyzer_command = cmd )
1028-
1029- # Obtain the ID eagerly to be able to use the M-to-N table.
1030- session .add (analysis_info )
1031- session .flush ()
1032- session .refresh (analysis_info , ["id" ])
1033-
1034- for analyzer in mip .analyzers :
1035- q = session \
1036- .query (Checker ) \
1037- .filter (Checker .analyzer_name == analyzer )
1038- db_checkers = {r .checker_name : r for r in q .all ()}
1039-
1040- connection_rows = [AnalysisInfoChecker (
1041- analysis_info , db_checkers [chk ], is_enabled )
1042- for chk , is_enabled
1043- in mip .checkers .get (analyzer , {}).items ()]
1044- for r in connection_rows :
1045- session .add (r )
1041+ analysis_info = AnalysisInfo (analyzer_command = cmd )
1042+
1043+ # Obtain the ID eagerly to be able to use the M-to-N table.
1044+ session .add (analysis_info )
1045+ session .flush ()
1046+ session .refresh (analysis_info , ["id" ])
1047+
1048+ for analyzer in mip .analyzers :
1049+ q = session \
1050+ .query (Checker ) \
1051+ .filter (Checker .analyzer_name == analyzer )
1052+ db_checkers = {r .checker_name : r for r in q .all ()}
1053+
1054+ connection_rows = [AnalysisInfoChecker (
1055+ analysis_info , db_checkers [chk ], is_enabled )
1056+ for chk , is_enabled
1057+ in mip .checkers .get (analyzer , {}).items ()]
1058+ for r in connection_rows :
1059+ session .add (r )
1060+
1061+ if mip .report_dir_path :
1062+ self .__store_analysis_info_files (session ,
1063+ analysis_info .id ,
1064+ mip .report_dir_path )
10461065
10471066 run_history .analysis_info .append (analysis_info )
10481067 self .__analysis_info [src_dir_path ] = analysis_info
0 commit comments