Refactor

parent 6701ec3df0
commit c460632dec
@@ -19,8 +19,9 @@ repositories {
 dependencies {
     implementation 'org.jetbrains.kotlin:kotlin-stdlib'
     implementation "com.xenomachina:kotlin-argparser:$kotlin_argparser_version"
-    implementation "org.jetbrains.exposed:exposed-core:0.24.1"
-    implementation "org.jetbrains.exposed:exposed-dao:0.24.1"
-    implementation "org.jetbrains.exposed:exposed-jdbc:0.24.1"
+    implementation "org.jetbrains.exposed:exposed-core:0.25.1"
+    implementation "org.jetbrains.exposed:exposed-dao:0.25.1"
+    implementation "org.jetbrains.exposed:exposed-jdbc:0.25.1"
+    compile("org.xerial:sqlite-jdbc:3.30.1")
     testImplementation 'junit:junit:4.12'
 }
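The Exposed artifacts move from 0.24.1 to 0.25.1 and the xerial sqlite-jdbc driver is added; that driver backs the in-memory Exposed connection opened in analyze() below. A minimal sketch of the pairing, assuming only the coordinates above (openScratchDb is an illustrative name, not part of this commit):

    import org.jetbrains.exposed.sql.Database
    import org.jetbrains.exposed.sql.transactions.transaction

    // Open an in-memory SQLite database through Exposed, as analyze() does.
    // Note: the standard in-memory URL uses a double colon (jdbc:sqlite::memory:).
    fun openScratchDb(): Database {
        val db = Database.connect("jdbc:sqlite::memory:", driver = "org.sqlite.JDBC")
        transaction(db) {
            // table creation / queries would run here
        }
        return db
    }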
@@ -1,27 +1,51 @@
 package me.msoucy.gbat
 
 import java.io.File
+import org.jetbrains.exposed.sql.*
+import org.jetbrains.exposed.sql.transactions.transaction
 
+import me.msoucy.gbat.models.CondensedAnalysis
+import me.msoucy.gbat.models.Condensation
 import me.msoucy.gbat.models.KnowledgeModel
 import me.msoucy.gbat.models.LineModel
 import me.msoucy.gbat.models.RiskModel
 
-private data class Condensation(val authors : List<String>, val knowledge : Double, val orphaned : Double, val atRisk : Double = 0.0) : Comparable<Condensation> {
-    override operator fun compareTo(other : Condensation) : Int {
-        return -1
-    }
-}
-private class Result(val repoRoot : File,
-                     val projectRoot : File,
-                     val fname : File,
-                     val results : List<Pair<String, List<Condensation>>>)
+fun analyze(
+    repoRoot : String,
+    projectRoot : String,
+    fname : String,
+    riskModel : RiskModel,
+    createdConstant : Double,
+    historyItem : HistoryItem,
+    verbose : Boolean = false
+) : CondensedAnalysis {
+    val lineModel = LineModel()
+    val db = Database.connect("jdbc:sqlite:memory:", "org.sqlite.JDBC")
+    val knowledgeModel = KnowledgeModel(db, createdConstant, riskModel)
+    var changesProcessed = 0
 
-private fun condenseAnalysis(repoRoot : File,
-                             projectRoot : File,
-                             fname : File,
-                             lineModel : LineModel,
-                             knowledgeModel : KnowledgeModel,
-                             riskModel : RiskModel) : Result {
+    historyItem.authorDiffs.forEach { (author, changes) ->
+        changes.forEach { change ->
+            changesProcessed++
+            if(changesProcessed % 1000 == 0 && verbose) {
+                System.err.println("Analyzer applied change #${changesProcessed}")
+            }
+            lineModel.apply(change.eventType, change.lineNum, change.lineVal ?: "")
+            knowledgeModel.apply(change.eventType, author, change.lineNum)
+        }
+    }
+
+    return condenseAnalysis(repoRoot, projectRoot, fname, lineModel, knowledgeModel, riskModel)
+}
+
+private fun condenseAnalysis(
+    repoRoot : String,
+    projectRoot : String,
+    fname : String,
+    lineModel : LineModel,
+    knowledgeModel : KnowledgeModel,
+    riskModel : RiskModel
+) : CondensedAnalysis {
     val condensations = lineModel.get().mapIndexed { idx, line ->
         val knowledges = knowledgeModel.knowledgeSummary(idx + 1).map { (authors, knowledge) ->
             Condensation(authors,
@@ -31,5 +55,5 @@ private fun condenseAnalysis(repoRoot : File,
         }.sorted()
         Pair(line, knowledges)
     }
-    return Result(repoRoot, projectRoot, fname, condensations)
+    return CondensedAnalysis(repoRoot, projectRoot, fname, condensations.mutableCopyOf())
 }
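After the refactor the analysis is split in two: parseHistory (new file below) turns a file's git log into a HistoryItem, and analyze replays those events through LineModel and KnowledgeModel before condensing. A hypothetical wiring of the two under the new signatures; analyzeOne is an illustrative helper, and the createdConstant value and the way riskModel is obtained are placeholders:

    import java.io.File

    // Hypothetical caller of the refactored pipeline; not part of this commit.
    fun analyzeOne(repo: GitRepo, projectRoot: File, fname: File, riskModel: RiskModel): CondensedAnalysis {
        val history = parseHistory(repo, projectRoot, fname)
        return analyze(
            repoRoot = history.repoRoot.path,
            projectRoot = projectRoot.path,
            fname = fname.path,
            riskModel = riskModel,
            createdConstant = 0.1,      // placeholder constant
            historyItem = history
        )
    }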
@@ -93,11 +93,11 @@ fun main(args: Array<String>) = mainBody {
         }
     }
 
-    val risk_thresh = risk_threshold ?: default_bus_risk.pow(3)
+    val riskThresh = risk_threshold ?: default_bus_risk.pow(3)
     val interesting_res = parse_interesting(if (interesting.isEmpty()) DEFAULT_INTERESTING_RES else interesting)
     val not_interesting_res = if (not_interesting.isEmpty()) listOf() else parse_interesting(not_interesting)
 
-    val project_root_file = File(project_root).also {
+    val projectRootFile = File(project_root).also {
         if(!it.isDirectory)
             throw InvalidArgumentException("Provided project root does not exist")
     }
@@ -125,7 +125,14 @@ fun main(args: Array<String>) = mainBody {
     }
 
     val pool = Executors.newFixedThreadPool(num_analyzer_procs + num_git_procs + 1)
 
+    fnames.forEach { fname ->
+        pool.submit {
+            parseHistory(repo, projectRootFile, File(fname))
+        }
+    }
+
     val summ_result = mutableListOf<Int>()
+    val dbFname = File(outDir, "summary.db")
     }
 }
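Each file's parseHistory call is now submitted to the shared thread pool. As shown in this hunk nothing waits on the submitted tasks; one way to join on them before the summary step, purely as an illustration (parseAll is a hypothetical helper, not part of the commit):

    import java.io.File
    import java.util.concurrent.Executors

    // Keep the Futures returned by submit and block until every per-file
    // parse has finished before moving on to summarizing.
    fun parseAll(fnames: List<String>, repo: GitRepo, projectRootFile: File, threads: Int) {
        val pool = Executors.newFixedThreadPool(threads)
        val futures = fnames.map { fname ->
            pool.submit { parseHistory(repo, projectRootFile, File(fname)) }
        }
        futures.forEach { it.get() }   // propagate failures and wait for completion
        pool.shutdown()
    }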
src/main/kotlin/me/msoucy/gbat/ParseHistory.kt (new file, 139 lines)
@@ -0,0 +1,139 @@
+package me.msoucy.gbat
+
+import java.io.File
+import kotlin.math.abs
+import kotlin.math.max
+
+import me.msoucy.gbat.models.ChangeType
+import me.msoucy.gbat.models.Event
+
+data class HistoryItem(
+    val repoRoot : File,
+    val projectRoot : File,
+    val fname : File,
+    val authorDiffs : List<Pair<String, List<Event>>>
+)
+
+fun parseHistory(repo : GitRepo,
+                 projectRoot : File,
+                 fname : File,
+                 verbose : Boolean = false) : HistoryItem {
+    val entries = repo.log(fname)
+    val repoRoot = repo.root()
+    if(verbose) {
+        System.err.println("Parsing history for ${fname}")
+    }
+    return HistoryItem(repoRoot, projectRoot, fname,
+        entries.map { (author, diff) ->
+            Pair(author.trim(), diffWalk(diff))
+        }
+    )
+}
+
+fun diffWalk(diff : Diff) : List<Event> {
+
+    fun String.startsChunk() = startsWith("@@")
+    fun String.isOldLine() = startsWith("-")
+    fun String.isNewLine() = startsWith("+")
+
+    fun chunkify() : List<List<String>> {
+        val chunks = mutableListOf<MutableList<String>>()
+        var curChunk = mutableListOf<String>()
+        diff.split("\n").forEach { line ->
+            if(line.startsChunk()) {
+                if(curChunk.isNotEmpty()) {
+                    chunks.add(curChunk)
+                    curChunk = mutableListOf<String>()
+                }
+                curChunk.add(line)
+            } else if(curChunk.isNotEmpty()) {
+                curChunk.add(line)
+            }
+        }
+        if(curChunk.isNotEmpty()) {
+            chunks.add(curChunk)
+        }
+        return chunks
+    }
+
+    val chunks = chunkify()
+    val events = mutableListOf<Event>()
+
+    class Hunk(
+        val lineNum : Int,
+        val oldLines : List<String>,
+        val newLines : List<String>
+    )
+
+    fun hunkize(chunkWoHeader : List<String>, firstLineNum : Int) : List<Hunk> {
+        var curOld = mutableListOf<String>()
+        var curNew = mutableListOf<String>()
+        var curLine = firstLineNum
+        var hunks = mutableListOf<Hunk>()
+
+        chunkWoHeader.forEach { line ->
+            if(line.isOldLine()) {
+                curOld.add(line)
+            } else if(line.isNewLine()) {
+                curNew.add(line)
+            } else if(curOld.isNotEmpty() || curNew.isNotEmpty()) {
+                hunks.add(Hunk(curLine, curOld, curNew))
+                curLine += curNew.size + 1
+                curOld = mutableListOf<String>()
+                curNew = mutableListOf<String>()
+            } else {
+                curLine++
+            }
+        }
+        if(curOld.isNotEmpty() || curNew.isNotEmpty()) {
+            hunks.add(Hunk(curLine, curOld, curNew))
+        }
+
+        return hunks
+    }
+
+    fun stepHunk(hunk : Hunk) {
+        val oldLen = hunk.oldLines.size
+        val newLen = hunk.newLines.size
+        val maxLen = max(oldLen, newLen)
+        var lineNum = hunk.lineNum
+
+        for (i in 0..maxLen) {
+            if(i < oldLen && i < newLen) {
+                events += Event(
+                    ChangeType.Change,
+                    lineNum,
+                    hunk.newLines[i].substring(1)
+                )
+            }
+        }
+    }
+
+    fun stepChunk(chunk : List<String>) {
+        val header = chunk[0]
+
+        // format of header is
+        //
+        // @@ -old_line_num,cnt_lines_in_old_chunk, +new_line_num,cnt_lines_in_new_chunk
+        //
+        val (_, lineInfo, _) = header.split("@@")
+        val offsets = lineInfo.trim().split(" ")
+
+        // we only care about the new offset, since in the first chunk
+        // of the file the new and old are the same, and since we add
+        // and subtract lines as we go, we should stay in step with the
+        // new offsets.
+        val newOffset = offsets[1].split(",").map{
+            abs(it.toInt())
+        }.first()
+
+        // a hunk is a group of contiguous - + lines
+        val hunks = hunkize(chunk.subList(1, chunk.size), newOffset)
+
+        hunks.forEach(::stepHunk)
+    }
+
+    chunks.forEach(::stepChunk)
+
+    return events
+}
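diffWalk drives the whole parse: chunkify splits the diff text on @@ headers, stepChunk reads the new-side offset out of the header, and hunkize groups contiguous -/+ runs into hunks. A worked illustration, assuming Diff is an alias for String (the sample diff text is invented):

    // A one-line change as diffWalk would see it. The header format matches
    // the comment in stepChunk: @@ -old_line,count +new_line,count @@
    val sampleDiff = """
        @@ -3,1 +3,1 @@
        -val x = 1
        +val x = 2
    """.trimIndent()

    // chunkify() yields one chunk; hunkize() pairs the -/+ run at new offset 3;
    // stepHunk() then emits Event(ChangeType.Change, 3, "val x = 2").
    val events = diffWalk(sampleDiff)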
@@ -20,14 +20,14 @@ class GitRepo(val projectRoot : File, val git_exe : String) {
         return out ?: ""
     }
 
-    fun root() : String? {
+    fun root() : File {
         val cmd = listOf(
             git_exe,
             "rev-parse",
             "--show-toplevel"
         )
         val (out, _) = cmd.runCommand(projectRoot)
-        return out
+        return File(out)
     }
 
     fun log(fname : File) : List<Pair<String, Diff>> {
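root() now returns a File rather than a nullable String; parseHistory above uses it to fill HistoryItem.repoRoot. A small illustrative caller of the stricter contract; the GitRepo arguments and the helper name are placeholders:

    import java.io.File

    // Illustrative usage only; "." and "git" are placeholder arguments.
    fun printBuildFile() {
        val repo = GitRepo(File("."), "git")
        val root: File = repo.root()               // no null handling needed any more
        println(root.resolve("build.gradle"))      // resolve paths off the repo root
    }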
@@ -45,7 +45,7 @@ class KnowledgeModel(val db : Database, val constant : Double, val riskModel : R
     val SAFE_KNOWLEDGE_ACCT_ID = 1
     val KNOWLEDGE_PER_LINE_ADDED = 1000.0
 
-    fun applyChange(changeType : ChangeType, author : String, lineNum : Int) = when(changeType) {
+    fun apply(changeType : ChangeType, author : String, lineNum : Int) = when(changeType) {
         ChangeType.Add -> lineAdded(author, lineNum)
         ChangeType.Change -> lineChanged(author, lineNum)
         ChangeType.Remove -> lineRemoved(lineNum)
@@ -12,7 +12,7 @@ class LineModel() {
     inner class Line(var num : Int, var text : String)
     val model = mutableSetOf<Line>()
 
-    fun applyChange(changeType : ChangeType, lineNum : Int, lineText : String) = when(changeType) {
+    fun apply(changeType : ChangeType, lineNum : Int, lineText : String) = when(changeType) {
         ChangeType.Add -> add(Line(lineNum, lineText))
         ChangeType.Change -> change(Line(lineNum, lineText))
         ChangeType.Remove -> del(Line(lineNum, lineText))
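Both models rename applyChange to apply, matching the call sites added in analyze() above. A sketch of the renamed dispatch; the db, constant, and riskModel arguments are assumed to exist, and the Event values are invented:

    // Replay a single event through both models, as analyze() does per change.
    fun replay(db: Database, constant: Double, riskModel: RiskModel) {
        val lines = LineModel()
        val knowledge = KnowledgeModel(db, constant, riskModel)
        val ev = Event(ChangeType.Add, 1, "fun main() {}")
        lines.apply(ev.eventType, ev.lineNum, ev.lineVal ?: "")
        knowledge.apply(ev.eventType, "alice", ev.lineNum)
    }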
@@ -2,4 +2,40 @@ package me.msoucy.gbat.models
 
 enum class ChangeType {
     Add, Change, Remove
 }
+
+data class Event(
+    val eventType : ChangeType,
+    val lineNum : Int,
+    val lineVal : String?
+)
+
+data class Condensation(
+    val authors : List<String>,
+    val knowledge : Double,
+    val orphaned : Double,
+    val risk : Double = 0.0
+) : Comparable<Condensation> {
+    override operator fun compareTo(other : Condensation) : Int {
+        var result = authors.size.compareTo(other.authors.size)
+        if(result == 0) {
+            authors.zip(other.authors).forEach { (a, b) ->
+                if(result == 0) result = a.compareTo(b)
+            }
+        }
+        if(result == 0)
+            result = knowledge.compareTo(other.knowledge)
+        if(result == 0)
+            result = orphaned.compareTo(other.orphaned)
+        if(result == 0)
+            result = risk.compareTo(other.risk)
+        return result
+    }
+}
+
+class CondensedAnalysis(
+    var repoRoot : String = "",
+    var projectRoot : String = "",
+    var fileName : String = "",
+    var lineSummaries : MutableList<Pair<String, List<Condensation>>> = mutableListOf()
+)
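Condensation replaces the old stub compareTo (which always returned -1) with a real ordering: author count first, then author names, knowledge, orphaned, and risk. For example:

    // Single-author knowledge sorts ahead of shared knowledge regardless of
    // the knowledge values, because the author count is compared first.
    fun orderingExample() {
        val solo = Condensation(listOf("alice"), knowledge = 10.0, orphaned = 0.0)
        val duo = Condensation(listOf("alice", "bob"), knowledge = 1.0, orphaned = 0.0)
        check(solo < duo)
    }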
@@ -7,20 +7,6 @@ import org.jetbrains.exposed.dao.id.IntIdTable
 import org.jetbrains.exposed.sql.*
 import org.jetbrains.exposed.sql.transactions.transaction
 
-class CondensedAnalysis {
-    class LineSummary {
-        var authors = listOf<String>()
-        var knowledge = 0.0
-        var risk = 0.0
-        var orphaned = 0.0
-    }
-    var repoRoot = ""
-    var project = ""
-    var projectRoot = ""
-    var fileName = ""
-    var lineSummaries = mutableListOf<Pair<String, List<LineSummary>>>()
-}
-
 class SummaryModel(val db : Database) {
 
     object ProjectTable : IntIdTable("projects", "projectid") {
@@ -112,7 +98,7 @@ class SummaryModel(val db : Database) {
 
     fun summarize(ca : CondensedAnalysis) {
         val fname = adjustFname(File(ca.repoRoot), File(ca.projectRoot), File(ca.fileName))
-        val projectId = findOrCreateProject(ca.project)
+        val projectId = findOrCreateProject(ca.projectRoot)
 
         var parentDirId = 0
         splitAllDirs(fname.parentFile).forEach {