Package: br.edu.utfpr.cm.JGitMinerWeb.dao

Usage examples of br.edu.utfpr.cm.JGitMinerWeb.dao.FileDAO (each snippet below is a truncated fragment scraped from the project source).


    @Override
    public void run() {
        try {
            PairFileDAO pairFileDAO = new PairFileDAO(dao);
            FileDAO fileDAO = new FileDAO(dao);

            if (getRepository() == null) {
                throw new IllegalArgumentException("Parâmetro Repository não pode ser nulo.");
            }

            Date futureBeginDate = getFutureBeginDate();
            Date futureEndDate = getFutureEndDate();
            Date beginDate = getBeginDate();
            Date endDate = getEndDate();
            List<String> filesToIgnore = getFilesToIgnore();
            List<String> filesToConsiders = getFilesToConsiders();

            out.printLog("Iniciando preenchimento da lista de pares.");

            List<Object> selectParams = new ArrayList<>();

            StringBuilder select = new StringBuilder();
            select.append("select distinct fil.filename, fil2.filename ")
                    .append(" from gitcommitfile fil, ")
                    .append("      gitpullrequest_gitrepositorycommit prc, ")
                    .append("      gitcommitfile fil2, ")
                    .append("      gitpullrequest_gitrepositorycommit prc2, ")
                    .append("      gitpullrequest pul, ")
                    .append("      gitissue i ")
                    .append("where pul.repository_id = ? ")
                    .append("  and pul.issue_id = i.id ")
                    .append("  and i.commentscount > 1 ")
                    .append("  and pul.createdat between ? and ? ");

            selectParams.add(getRepository().getId());
            selectParams.add(beginDate);
            selectParams.add(endDate);

            if (isOnlyMergeds()) {
                select.append("  and pul.mergedat is not null ");
            }
            select.append("  and prc.entitypullrequest_id = pul.id ")
                    .append("  and (select count(distinct(f.filename)) from gitcommitfile f where f.repositorycommit_id = prc.repositorycommits_id) between ? and ?")
                    .append("  and fil.repositorycommit_id = prc.repositorycommits_id ")
                    .append("  and prc2.entitypullrequest_id = pul.id ")
                    .append("  and (select count(distinct(f.filename)) from gitcommitfile f where f.repositorycommit_id = prc2.repositorycommits_id) between ? and ?")
                    .append("  and fil2.repositorycommit_id = prc2.repositorycommits_id ")
                    .append("  and md5(fil.filename) <> md5(fil2.filename) ");

            selectParams.add(getMinFilesPerCommit());
            selectParams.add(getMaxFilesPerCommit());
            selectParams.add(getMinFilesPerCommit());
            selectParams.add(getMaxFilesPerCommit());

            if (!filesToIgnore.isEmpty()) {
                for (String fileName : filesToIgnore) {
                    select.append("  and fil.filename not like ? ")
                            .append("  and fil2.filename not like ? ");
                    selectParams.add(fileName);
                    selectParams.add(fileName);
                }
            }

            if (!filesToConsiders.isEmpty()) {
                select.append(" and (");

                int likeFilename = 0;
                for (String fileName : filesToConsiders) {
                    if (likeFilename++ > 0) {
                        select.append("or");
                    }
                    select.append(" fil.filename like ? ");
                    selectParams.add(fileName);
                }
                select.append(")");

                select.append(" and (");

                likeFilename = 0;
                for (String fileName : filesToConsiders) {
                    if (likeFilename++ > 0) {
                        select.append("or");
                    }
                    select.append(" fil2.filename like ? ");
                    selectParams.add(fileName);
                }
                select.append(")");
            }

            System.out.println(select);

            List<Object[]> cochangeResult = dao.selectNativeWithParams(select.toString(), selectParams.toArray());

            Set<AuxFileFileMetrics> pairFileMetrics = new HashSet<>();
            for (Object[] record : cochangeResult) {
                AuxFileFileMetrics pairFile = new AuxFileFileMetrics(record[0] + "", record[1] + "");

                // o arquivo deve aparecer em mais de 1 pull request e não somente 1
                // caso contrário não é incluso
                long file1PullRequestIn = fileDAO.calculeNumberOfPullRequestWhereFileIsIn(
                        getRepository(), pairFile.getFile(),
                        beginDate, endDate, 0, getMaxFilesPerCommit(), isOnlyMergeds());
                if (file1PullRequestIn > 1) {

                    long file2PullRequestIn = fileDAO.calculeNumberOfPullRequestWhereFileIsIn(
                            getRepository(), pairFile.getFile2(),
                            beginDate, endDate, 0, getMaxFilesPerCommit(), isOnlyMergeds());
                    if (file2PullRequestIn > 1) {
                        pairFileMetrics.add(pairFile);
                    } else {
View Full Code Here


       
        // rede de comunicação de cada par de arquivo
        Map<AuxFileFile, DirectedSparseGraph<String, String>> pairFileNetwork = new HashMap<>();
       
        int countIgnored = 0;
        FileDAO fileDAO = new FileDAO(dao);
        PairFileDAO pairFileDAO = new PairFileDAO(dao);
        Long pullRequestsSize = pairFileDAO
                .calculeNumberOfPullRequest(repository,
                        null, null, futureBeginDate, futureEndDate, true);
       
        System.out.println("Number of all pull requests: " + pullRequestsSize);
       
        //Map<String, Long> futurePullRequest = new HashMap<>();
       
        // construindo a rede de comunicação para cada par de arquivo (desenvolvedores que comentaram)
        int nodesSize = getMatrix().getNodes().size();
        int count = 0;
        Set<AuxFileFile> pairFilesSet = new HashSet<>();
        for (int i = 0; i < getMatrix().getNodes().size(); i++) {
            if (count++ % 100 == 0 || count == nodesSize) {
                System.out.println(count + "/" + nodesSize);
            }
            EntityMatrixNode node = getMatrix().getNodes().get(i);
            String[] columns = node.getLine().split(JsfUtil.TOKEN_SEPARATOR);

            AuxFileFile pairFile = new AuxFileFile(columns[1], columns[2]);
            pairFilesSet.add(pairFile);
            // ignora %README%, %Rakefile, %CHANGELOG%, %Gemfile%, %.gitignore
//            if (isIgnored(pairFile.getFileName())
//                    || isIgnored(pairFile.getFileName2())) {
//                out.printLog("Ignoring " + pairFile);
//                countIgnored++;
//                continue;
//            }
           
//            Long pairFileNumberOfPullrequestOfPairFuture;
//            if (futurePullRequest.containsKey(pairFile.toString())) {
//                pairFileNumberOfPullrequestOfPairFuture = futurePullRequest.get(pairFile.toString());
//            } else {
//                pairFileNumberOfPullrequestOfPairFuture = pairFileDAO
//                    .calculeNumberOfPullRequest(repository,
//                            pairFile.getFileName(), pairFile.getFileName2(),
//                            futureBeginDate, futureEndDate, true);
//                futurePullRequest.put(pairFile.toString(), pairFileNumberOfPullrequestOfPairFuture);
//            }
           
//            Double supportPairFile = numberOfAllPullrequestFuture == 0 ? 0d :
//                    pairFileNumberOfPullrequestOfPairFuture.doubleValue() /
//                    numberOfAllPullrequestFuture.doubleValue();
           
           
            // minimum support is 0.01, ignore file if lower than this (0.01)
//            if (supportPairFile < Double.valueOf(0.01d)) {
//            if (pairFileNumberOfPullrequestOfPairFuture < 2) {
//                out.printLog("Ignoring " + pairFile + ": future pull requests " + pairFileNumberOfPullrequestOfPairFuture);
//                countIgnored++;
//                continue;
//            }
           
            String commiter1 = columns[0];
            String commiter2 = columns[3];
           
            /**
             * Extract all distinct developer that commit a pair of file
             */
            if (commitersPairFile.containsKey(pairFile)) {
                Set<String> commiters = commitersPairFile.get(pairFile);
                commiters.add(commiter1);
                commiters.add(commiter2);
            } else {
                Set<String> commiters = new HashSet<>();
                commiters.add(commiter1);
                commiters.add(commiter2);
                commitersPairFile.put(pairFile, commiters);
            }

            // adiciona conforme o peso
//            String edgeName = pairFile.getFileName() + "-" + pairFile.getFileName2() + "-" + i;
            AuxUserUser pairUser = new AuxUserUser(columns[0], columns[3]);
           
            /* Sum commit for each pair file that the pair devCommentter has commited. */
            // user > user2 - directed edge
            if (edgesWeigth.containsKey(pairUser.toStringUserAndUser2())) {
                // edgeName = user + user2
                edgesWeigth.put(pairUser.toStringUserAndUser2(), edgesWeigth.get(pairUser.toStringUserAndUser2()) + Integer.valueOf(columns[4]));
//            // for undirectional graph
//            } else if (edgesWeigth.containsKey(pairUser.toStringUser2AndUser())) {
//                // edgeName = user2 + user - undirected edge
//                edgesWeigth.put(pairUser.toStringUser2AndUser(), edgesWeigth.get(pairUser.toStringUser2AndUser()) + Integer.valueOf(columns[4]));
            } else {
                edgesWeigth.put(pairUser.toStringUserAndUser2(), Integer.valueOf(columns[4]));
            }
           
            if (!graph.containsVertex(pairUser.getUser())
                    || !graph.containsVertex(pairUser.getUser2())
                    || !graph.containsEdge(pairUser.toStringUserAndUser2())) {
                graph.addEdge(pairUser.toStringUserAndUser2(), pairUser.getUser(), pairUser.getUser2(), EdgeType.DIRECTED);
            }
               
            // check if network already created
            if (pairFileNetwork.containsKey(pairFile)) {
                pairFileNetwork.get(pairFile)
                        .addEdge(pairUser.toStringUserAndUser2(), pairUser.getUser(), pairUser.getUser2(), EdgeType.DIRECTED);
            } else {
                DirectedSparseGraph<String, String> graphMulti
                        = new DirectedSparseGraph<>();
                graphMulti.addEdge(pairUser.toStringUserAndUser2(), pairUser.getUser(), pairUser.getUser2(), EdgeType.DIRECTED);
                pairFileNetwork.put(pairFile, graphMulti);
            }
        }
        SimpleDateFormat format = new SimpleDateFormat("dd.MM.yyyy");
        JungExport.exportToImage(graph, "C:/Users/a562273/Desktop/networks/",
                repository.getName() + " Single " + format.format(beginDate) + " a " + format.format(endDate));
       
        out.printLog("Número de pares de arquivos ignoradoa: " + countIgnored);
       
        out.printLog("Número de autores de comentários (commenters): " + graph.getVertexCount());
        out.printLog("Número de pares de arquivos: " + commitersPairFile.size());
        out.printLog("Número de pares de arquivos new: " + pairFilesSet.size());
        out.printLog("Iniciando cálculo das métricas.");

        Set<AuxFileFileMetrics> fileFileMetrics = new HashSet<>();
       
        out.printLog("Calculando metricas SNA...");

        GlobalMeasure global = GlobalMeasureCalculator.calcule(graph);
        out.printLog("Global measures: " + global.toString());
        // Map<String, Double> barycenter = BarycenterCalculator.calcule(graph, edgesWeigth);
        Map<String, Double> betweenness = BetweennessCalculator.calcule(graph, edgesWeigth);
        Map<String, Double> closeness = ClosenessCalculator.calcule(graph, edgesWeigth);
        Map<String, Integer> degree = DegreeCalculator.calcule(graph);
         Map<String, Double> eigenvector = EigenvectorCalculator.calcule(graph, edgesWeigth);
        Map<String, EgoMeasure<String>> ego = EgoMeasureCalculator.calcule(graph, edgesWeigth);
        Map<String, StructuralHolesMeasure<String>> structuralHoles = StructuralHolesCalculator.calcule(graph, edgesWeigth);

        // number of pull requests in date interval
        Long numberOfAllPullrequestFuture = pairFileDAO.calculeNumberOfPullRequest(getRepository(), null, null, futureBeginDate, futureEndDate, true);
        // cache for optimization number of pull requests where file is in,
        // reducing access to database
        Map<String, Long> pullRequestFileMap = new HashMap<>();
        // cache for optimization file code churn (add, del, change),
        // reducing access to database
        Map<String, AuxCodeChurn> codeChurnRequestFileMap = new HashMap<>();
        Map<String, AuxCodeChurn> cummulativeCodeChurnRequestFileMap = new HashMap<>();
        // cache for optimization file commits made by user,
        // reducing access to database
        Map<String, AuxCodeChurn> fileUserCommitMap = new HashMap<>();

        out.printLog("Calculando somas, máximas, médias, updates, code churn e apriori para cada par de arquivos...");
        count = 0;
        final int size = commitersPairFile.entrySet().size();
        out.printLog("Número de pares de arquivos: " + commitersPairFile.keySet().size());
        for (Map.Entry<AuxFileFile, Set<String>> entry : commitersPairFile.entrySet()) {
            if (count++ % 100 == 0 || count == size) {
                System.out.println(count + "/" + size);
            }
            AuxFileFile fileFile = entry.getKey();
            Set<String> devsCommentters = entry.getValue();
           
            // pair file network
            GlobalMeasure pairFileGlobal = GlobalMeasureCalculator.calcule(pairFileNetwork.get(fileFile));
           
//            Double barycenterSum = 0d, barycenterAvg, barycenterMax = Double.NEGATIVE_INFINITY;
            Double betweennessSum = 0d, betweennessAvg, betweennessMax = Double.NEGATIVE_INFINITY;
            Double closenessSum = 0d, closenessAvg, closenessMax = Double.NEGATIVE_INFINITY;
            Integer degreeSum = 0, degreeMax = Integer.MIN_VALUE;
            Double degreeAvg;
            Double eigenvectorSum = 0d, eigenvectorAvg, eigenvectorMax = Double.NEGATIVE_INFINITY;

            Double egoBetweennessSum = 0d, egoBetweennessAvg, egoBetweennessMax = Double.NEGATIVE_INFINITY;
            Long egoSizeSum = 0l, egoSizeMax = Long.MIN_VALUE;
//            Long egoPairsSum = 0l, egoPairsMax = Long.MIN_VALUE;
            Long egoTiesSum = 0l, egoTiesMax = Long.MIN_VALUE;
            Double egoSizeAvg, /*egoPairsAvg,*/ egoTiesAvg;
            Double egoDensitySum = 0d, egoDensityAvg, egoDensityMax = Double.NEGATIVE_INFINITY;

            Double efficiencySum = 0.0d, efficiencyAvg, efficiencyMax = Double.NEGATIVE_INFINITY;
            Double effectiveSizeSum = 0.0d, effectiveSizeAvg, effectiveSizeMax = Double.NEGATIVE_INFINITY;
            Double constraintSum = 0.0d, constraintAvg, constraintMax = Double.NEGATIVE_INFINITY;
            Double hierarchySum = 0.0d, hierarchyAvg, hierarchyMax = Double.NEGATIVE_INFINITY;

            for (String commenter : devsCommentters) {
                // sums calculation
//                barycenterSum += barycenter.get(commenter);
                betweennessSum += betweenness.get(commenter);
                closenessSum += Double.isInfinite(closeness.get(commenter)) ? 0 : closeness.get(commenter);
                degreeSum += degree.get(commenter);
//                eigenvectorSum += eigenvector.get(commenter);

                egoBetweennessSum += ego.get(commenter).getBetweennessCentrality();
                egoSizeSum += ego.get(commenter).getSize();
//                egoPairsSum += ego.get(commenter).getPairs();
                egoTiesSum += ego.get(commenter).getTies();
                egoDensitySum += ego.get(commenter).getDensity();

                efficiencySum += structuralHoles.get(commenter).getEfficiency();
                effectiveSizeSum += structuralHoles.get(commenter).getEffectiveSize();
                constraintSum += structuralHoles.get(commenter).getConstraint();
                hierarchySum += structuralHoles.get(commenter).getHierarchy();
                // maximum calculation
//                barycenterMax = Math.max(barycenterMax, barycenter.get(commenter));
                betweennessMax = Math.max(betweennessMax, betweenness.get(commenter));
                closenessMax = Math.max(closenessMax, Double.isInfinite(closeness.get(commenter)) ? 0 : closeness.get(commenter));
                degreeMax = Math.max(degreeMax, degree.get(commenter));
                eigenvectorMax = Math.max(eigenvectorMax, eigenvector.get(commenter));

                egoBetweennessMax = Math.max(egoBetweennessMax, ego.get(commenter).getBetweennessCentrality());
                egoSizeMax = Math.max(egoSizeMax, ego.get(commenter).getSize());
//                egoPairsMax = Math.max(egoPairsMax, ego.get(commenter).getPairs());
                egoTiesMax = Math.max(egoTiesMax, ego.get(commenter).getTies());
                egoDensityMax = Math.max(egoDensityMax, ego.get(commenter).getDensity());

                efficiencyMax = Math.max(efficiencyMax, structuralHoles.get(commenter).getEfficiency());
                effectiveSizeMax = Math.max(effectiveSizeMax, structuralHoles.get(commenter).getEffectiveSize());
                constraintMax = Math.max(constraintMax, structuralHoles.get(commenter).getConstraint());
                hierarchyMax = Math.max(hierarchyMax, structuralHoles.get(commenter).getHierarchy());

            }

            // Average calculation /////////////////////////////////////////////
            Integer distinctCommentersCount = devsCommentters.size();
//            barycenterAvg = barycenterSum / (double) distinctCommentersCount;
            betweennessAvg = betweennessSum / distinctCommentersCount.doubleValue();
            closenessAvg = closenessSum / distinctCommentersCount.doubleValue();
            degreeAvg = degreeSum / distinctCommentersCount.doubleValue();
            eigenvectorAvg = eigenvectorSum / distinctCommentersCount.doubleValue();

            egoBetweennessAvg = egoBetweennessSum / distinctCommentersCount.doubleValue();
            egoSizeAvg = egoSizeSum / distinctCommentersCount.doubleValue();
//            egoPairsAvg = egoPairsSum / distinctCommentersCount;
            egoTiesAvg = egoTiesSum / distinctCommentersCount.doubleValue();
            egoDensityAvg = egoDensitySum / distinctCommentersCount.doubleValue();

            efficiencyAvg = efficiencySum / distinctCommentersCount.doubleValue();
            effectiveSizeAvg = effectiveSizeSum / distinctCommentersCount.doubleValue();
            constraintAvg = constraintSum / distinctCommentersCount.doubleValue();
            hierarchyAvg = hierarchySum / distinctCommentersCount.doubleValue();

            // Weighted geometric average: issue > committers + commits ////////
            final long[][] committersCommitsPerIssue = pairFileDAO.calculeCommittersXCommits(
                    repository, fileFile.getFileName(), fileFile.getFileName2(), beginDate, endDate);
            final double geometricAverageCommittersCommits
                    = MathUtils.calculateWeightedGeometricAverage(committersCommitsPerIssue);

            // Commit-based metrics ////////////////////////////////////////////
            final long changes = calculeFileCodeChurn(codeChurnRequestFileMap, fileFile.getFileName(), fileDAO, beginDate, endDate);
            final long changes2 = calculeFileCodeChurn(codeChurnRequestFileMap, fileFile.getFileName2(), fileDAO, beginDate, endDate);

            final long cummulativeChanges = calculeFileCodeChurn(cummulativeCodeChurnRequestFileMap, fileFile.getFileName(), fileDAO, null, endDate);
            final long cummulativeChanges2 = calculeFileCodeChurn(cummulativeCodeChurnRequestFileMap, fileFile.getFileName2(), fileDAO, null, endDate);

            Set<AuxUser> devsCommitters = pairFileDAO.selectCommitters(repository,
                    fileFile.getFileName(), fileFile.getFileName2(), beginDate, endDate);

            Long devCommitsSum = 0l, devCommitsMax = 0l;
            Double devCommitsAvg;
            Double ownershipSum = 0.0d, ownershipAvg, ownershipMax = 0.0d;
            Long minorContributors = 0l, majorContributors = 0l;
            Double ownerExperience = 0.0d, ownerExperience2 = 0.0d, cummulativeOwnerExperience = 0.0d, cummulativeOwnerExperience2 = 0.0d;

            long committers = devsCommitters.size();
            long distinctCommitters = pairFileDAO.calculeCommitters(repository,
                    fileFile.getFileName(), fileFile.getFileName2(), null, endDate);

            Long commits = pairFileDAO.calculeCommits(repository,
                    fileFile.getFileName(), fileFile.getFileName2(),
                    beginDate, endDate);

            for (AuxUser devCommitter : devsCommitters) {
                Long devCommits = pairFileDAO.calculeCommits(repository,
                        fileFile.getFileName(), fileFile.getFileName2(), devCommitter.getUser(),
                        beginDate, endDate);
                devCommitsSum += devCommits;

                Double ownership = devCommits.doubleValue() / commits.doubleValue();
                ownershipSum += ownership;

                if (ownership <= 0.05) { // menor ou igual que 5% = minor
                    minorContributors++;
                } else { // maior que 5% = major
                    majorContributors++;
                }

                devCommitsMax = Math.max(devCommitsMax, devCommits);
                ownershipMax = Math.max(ownershipMax, ownership);

                // Calculing OEXP of each file
                Double experience = calculeDevFileExperience(changes, fileUserCommitMap, fileFile.getFileName(), devCommitter.getUser(), fileDAO, beginDate, endDate);
                ownerExperience = Math.max(experience, ownerExperience);

                Double experience2 = calculeDevFileExperience(changes2, fileUserCommitMap, fileFile.getFileName2(), devCommitter.getUser(), fileDAO, beginDate, endDate);
                ownerExperience2 = Math.max(experience2, ownerExperience2);

                // Calculing OWN
                Double cumulativeExperience = calculeDevFileExperience(cummulativeChanges, fileUserCommitMap, fileFile.getFileName(), devCommitter.getUser(), fileDAO, null, endDate);
                cummulativeOwnerExperience = Math.max(cummulativeOwnerExperience, cumulativeExperience);

                Double cumulativeExperience2 = calculeDevFileExperience(cummulativeChanges2, fileUserCommitMap, fileFile.getFileName2(), devCommitter.getUser(), fileDAO, null, endDate);
                cummulativeOwnerExperience2 = Math.max(cummulativeOwnerExperience2, cumulativeExperience2);

            }

            devCommitsAvg = (double) devCommitsSum / (double) committers;
            ownershipAvg = (double) ownershipSum / (double) committers;

//            double majorContributorsRate = (double) majorContributors / (double) committers; // % de major
//            double minorContributorsRate = (double) minorContributors / (double) committers; // % de minor

            Long updates = pairFileDAO.calculeNumberOfPullRequest(repository,
                    fileFile.getFileName(), fileFile.getFileName2(),
                    beginDate, endDate, true);

            Long futureUpdates;
            if (beginDate.equals(futureBeginDate) && endDate.equals(futureEndDate)) {
                futureUpdates = updates;
            } else {
                futureUpdates = pairFileDAO.calculeNumberOfPullRequest(repository,
                        fileFile.getFileName(), fileFile.getFileName2(),
                        futureBeginDate, futureEndDate, true);
            }

            // list all issues and its comments
            List<AuxWordiness> issuesAndComments = pairFileDAO.listIssues(repository,
                    fileFile.getFileName(), fileFile.getFileName2(), beginDate, endDate, true);

            long wordiness = 0;
            for (AuxWordiness auxWordiness : issuesAndComments) {
                wordiness += WordinessCalculator.calcule(auxWordiness);
            }

            Long commentsSum = pairFileDAO.calculeComments(repository,
                    fileFile.getFileName(), fileFile.getFileName2(),
                    beginDate, endDate, true);

            Long codeChurn = fileDAO.calculeCodeChurn(repository,
                    fileFile.getFileName(), beginDate, endDate);
            Long codeChurn2 = fileDAO.calculeCodeChurn(repository,
                    fileFile.getFileName2(), beginDate, endDate);

            AuxCodeChurn pairFileCodeChurn = pairFileDAO.calculeCodeChurnAddDelChange(repository,
                    fileFile.getFileName2(), fileFile.getFileName(),
                    beginDate, endDate);
View Full Code Here

            throw new IllegalArgumentException("Parâmetro Repository não pode ser nulo.");
        }

        Date beginDate = getBeginDate();
        Date endDate = getEndDate();
        FileDAO fileDAO = new FileDAO(dao);

        StringBuilder jpql = new StringBuilder();

        Map<AuxUserFileFileUserDirectional, AuxUserFileFileUserDirectional> result = new HashMap<>();

        Pattern fileToConsiders = MatcherUtils.createExtensionIncludeMatcher(getFilesToConsiders());
        //Pattern fileToIgnore = MatcherUtils.createExcludeMatcher(getFilesToIgnore());

        final List<String> paramNames = new ArrayList<>();
        paramNames.add("repo");
        paramNames.add("beginDate");
        paramNames.add("endDate");

        final List<Object> paramValues = new ArrayList<>();
        paramValues.add(getRepository());
        paramValues.add(beginDate);
        paramValues.add(endDate);
       
        jpql.append("SELECT DISTINCT i")
            .append(" FROM")
            .append(" EntityPullRequest p JOIN p.issue i")
            .append(" WHERE")
            .append(" p.repository = :repo")
            .append(" AND p.createdAt BETWEEN :beginDate AND :endDate")
            .append(" AND i.commentsCount > 1");

        if (isOnlyMerged()) {
            jpql.append(" AND p.mergedAt IS NOT NULL");
        }

        System.out.println(jpql);

        // select a issue/pullrequest comments
        List<EntityIssue> issuesCommenteds = dao.selectWithParams(jpql.toString(),
                paramNames.toArray(new String[paramNames.size()]),
                paramValues.toArray());
       
        out.printLog("Issues comentadas: " + issuesCommenteds.size());

        final String selectPullRequests = "SELECT p "
                + " FROM EntityPullRequest p "
                + " WHERE p.repository = :repo "
                + (isOnlyMerged() ? " AND p.mergedAt IS NOT NULL " : "") // merged
                + " AND p.createdAt BETWEEN :beginDate AND :endDate"
                + " AND p.issue = :issue "
                + " ORDER BY p.createdAt ";

        final String[] selectPullRequestsParams = new String[]{"repo", "beginDate", "endDate", "issue"};

        final String selectComments = "SELECT c "
                + " FROM EntityComment c "
                + " WHERE c.issue = :issue "
                + " ORDER BY c.createdAt ";

        final String[] selectCommentsParams = new String[]{"issue"};

        int count = 1;
        int realPairFilesCount = 0;

        for (EntityIssue issue : issuesCommenteds) {
            out.printLog("##################### NR: " + issue.getNumber() + " URL: " + issue.getUrl());
            out.printLog(count + " of the " + issuesCommenteds.size());

            EntityPullRequest pr = dao.selectOneWithParams(selectPullRequests,
                    selectPullRequestsParams,
                    new Object[]{getRepository(), beginDate, endDate, issue});

            out.printLog("Pull Request #" + pr.getId());
            if (pr.getRepositoryCommits().isEmpty()) {
                out.printLog("No Commits in Pull Request");
                count++;
                continue;
            }

            out.printLog(pr.getRepositoryCommits().size() + " commits in pull request ");

            List<EntityCommitFile> commitFiles = new ArrayList<>();
            for (EntityRepositoryCommit comm : pr.getRepositoryCommits()) {
                if (comm.getFiles().size() <= getMaxFilesPerCommit()) {
                    for (EntityCommitFile entityCommitFile : comm.getFiles()) {
                        long countPullRequestIn = fileDAO.calculeNumberOfPullRequestWhereFileIsIn(
                                getRepository(), entityCommitFile.getFilename(),
                                beginDate, endDate, 0, getMaxFilesPerCommit(), isOnlyMerged());
                        if (//!fileToIgnore.matcher(file2.getFilename()).matches() &&
                                fileToConsiders.matcher(entityCommitFile.getFilename()).matches()
                                && countPullRequestIn > 1) {
View Full Code Here

TOP

Related Classes of br.edu.utfpr.cm.JGitMinerWeb.dao.FileDAO

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., owned by Oracle Inc. Contact: coftware#gmail.com.