Remove gradle & cleanup

s1lentq 2021-04-08 05:04:42 +07:00
parent ccd36e765b
commit c893651bf0
114 changed files with 90 additions and 6600 deletions

5
.gitignore vendored
View File

@ -1,6 +1,4 @@
**/build
**/.gradle
.idea
*.iml
*.bat
*.log
@ -21,6 +19,3 @@
**/msvc/ipch
rehlds/version/appversion.h
rehlds/_rehldsTestImg
rehlds/_dev
publish

View File

@ -32,6 +32,10 @@ Archive's bin directory contains 2 subdirectories, 'bugfixed' and 'pure'
## Configuring
Bugfixed version of rehlds contains additional cvars:
<details>
<summary>Click to expand</summary>
<ul>
<li>listipcfgfile <filename> // File for permanent ip bans. Default: listip.cfg
<li>syserror_logfile <filename> // File for the system error log. Default: sys_error.log
@ -57,64 +61,55 @@ Bugfixed version of rehlds contains an additional cvars:
<li>sv_use_entity_file // Use a custom entity file for a map. The path to the entity file will be "maps/[map name].ent". 0 - use original entities. 1 - use .ent files from the maps directory. 2 - use .ent files from the maps directory and create a new .ent file if it does not exist. See the configuration sketch after this list.
</ul>
</details>
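For illustration, these cvars can be placed in the server configuration file. A minimal sketch using the defaults listed above (file name and values are examples only):
<pre>
// server.cfg excerpt (hypothetical)
listipcfgfile "listip.cfg"        // permanent IP ban list
syserror_logfile "sys_error.log"  // system error log
sv_use_entity_file 1              // load maps/[map name].ent when present
</pre>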
## Commands
Bugfixed version of rehlds contains additional commands:
Bugfixed version of rehlds contains additional commands
<ul>
<li>rescount // Prints the total count of precached resources in the server console
<li>reslist &lt;sound | model | decal | generic | event&gt; // Separately prints the details of the precached sounds, models, decals, generic resources and events in the server console. Useful for managing resources and dealing with the GoldSource precache limits. See the console example below.
</ul>
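The commands above are run in the server console. A hypothetical session (output omitted) might look like:
<pre>
rescount
reslist sound
reslist model
reslist generic
</pre>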
## Build instructions
There are several software requirements for building rehlds:
<ol>
<li>Java Development Kit (JDK) 7+ (http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)</li>
<li>For Windows: Visual Studio 2013 and later</li>
<li>For Linux: Intel C++ Compiler 13 and later or GCC 4.9.2 or later (some earlier versions might work too)</li>
</ol>
### Checking requirements
#### JDK version
Windows<pre>&gt; %JAVA_HOME%\bin\javac -version
javac 1.8.0_25
There are several software requirements for building rehlds:
#### Windows
<pre>
Visual Studio 2013 and later
</pre>
Linux
<pre>$ javac -version
javac 1.7.0_65
</pre>
#### Visual Studio
Help -> About
#### ICC
<pre>$ icc --version
icc (ICC) 15.0.1 20141023
</pre>
#### GCC
<pre>$ gcc --version
gcc (Debian 4.9.2-10) 4.9.2
#### Linux
<pre>
cmake >= 3.10
GCC >= 4.9.2 (Optional)
ICC >= 15.0.1 20141023 (Optional)
LLVM (Clang) >= 6.0 (Optional)
</pre>
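The installed versions can be checked with the tools' usual version flags, for example:
<pre>
$ cmake --version
$ gcc --version
$ clang --version
$ icc --version
</pre>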
### Building
On Windows:
<pre>gradlew --max-workers=1 clean buildRelease</pre>
* For faster building without unit tests, use this :exclamation:
<pre>gradlew --max-workers=1 clean buildFixes</pre>
<b>NOTE:</b> You can also build with `Visual Studio`: just select `Release Swds` or `Debug Swds` from the solution configurations list<br />
On Linux (ICC):
<pre>./gradlew --max-workers=1 clean buildRelease</pre>
* For faster building without unit tests, use this :exclamation:
<pre>./gradlew --max-workers=1 clean buildFixes</pre>
#### Windows
Use `Visual Studio` to build: open `msvc/ReHLDS.sln` and select `Release Swds` or `Debug Swds` from the solution configurations list
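If a command-line build is preferred, the same configurations can presumably be driven through MSBuild; this invocation is a sketch (the `Win32` platform name is an assumption, check the solution settings):
<pre>
&gt; msbuild msvc\ReHLDS.sln /p:Configuration="Release Swds" /p:Platform=Win32 /m
</pre>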
On Linux (GCC):
<pre>./gradlew --max-workers=1 -PuseGcc clean buildRelease</pre>
* For faster building without unit tests, use this :exclamation:
<pre>./gradlew --max-workers=1 -PuseGcc clean buildFixes</pre>
#### Linux
There is also a `buildEngine` task that builds only the engine, without the other parts of the project.<br />
Compiled binaries will be placed in the rehlds/build/binaries/ directory
<ul>
<li>
ICC:
<pre>./build.sh --compiler=intel</pre>
</li>
<li>
LLVM (Clang):
<pre>./build.sh --compiler=clang</pre>
</li>
<li>
GCC:
<pre>./build.sh --compiler=gcc</pre>
</li>
</ul>
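The compiler switch can be combined with a job count (both are handled by `build.sh`), for example:
<pre>
./build.sh --compiler=gcc --jobs=4
</pre>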
## How can I help the project?
Just install it on your game server and report any problems you encounter.

View File

@ -1,55 +0,0 @@
import versioning.GitVersioner
import versioning.RehldsVersionInfo
import org.joda.time.DateTime
apply plugin: 'maven-publish'
apply from: 'shared.gradle'
group = 'rehlds'
apply plugin: 'idea'
idea {
project {
languageLevel = 'JDK_1_7'
}
}
def gitInfo = GitVersioner.versionForDir(project.rootDir)
RehldsVersionInfo versionInfo
if (gitInfo && gitInfo.tag && gitInfo.tag[0] == 'v') {
def m = gitInfo.tag =~ /^v(\d+)\.(\d+)(\.(\d+))?$/
if (!m.find()) {
throw new RuntimeException("Invalid git version tag name ${gitInfo.tag}")
}
versionInfo = new RehldsVersionInfo(
majorVersion: m.group(1) as int,
minorVersion: m.group(2) as int,
maintenanceVersion: m.group(4) ? (m.group(4) as int) : null,
localChanges: gitInfo.localChanges,
commitDate: gitInfo.commitDate,
commitSHA: gitInfo.commitSHA,
commitURL: gitInfo.commitURL
)
} else {
versionInfo = new RehldsVersionInfo(
majorVersion: project.majorVersion as int,
minorVersion: project.minorVersion as int,
maintenanceVersion: project.maintenanceVersion as int,
suffix: 'dev',
localChanges: gitInfo ? gitInfo.localChanges : true,
commitDate: gitInfo ? gitInfo.commitDate : new DateTime(),
commitSHA: gitInfo ? gitInfo.commitSHA : "",
commitURL: gitInfo ? gitInfo.commitURL : "",
commitCount: gitInfo ? (gitInfo.commitCount as int) : null
)
}
project.ext.rehldsVersionInfo = versionInfo
project.version = versionInfo.asMavenVersion()
apply from: 'publish.gradle'
task wrapper(type: Wrapper) {
gradleVersion = '2.4'
}

39
build.sh Executable file
View File

@ -0,0 +1,39 @@
#!/bin/bash
CC=gcc
CXX=g++
n=0
args=()
for i in "$@"
do
case $i in
-j=*|--jobs=*)
jobs="${i#*=}"
shift
;;
-c=*|--compiler=*)
C="${i#*=}"
shift
;;
*)
args[$n]="$i"
((++n))
;;
esac
done
case "$C" in
("intel"|"icc") CC=icc CXX=icpc ;;
("gcc") CC=gcc CXX=g++ ;;
("clang") CC=clang CXX=clang++ ;;
*)
;;
esac
rm -rf build
mkdir build
pushd build
CC=$CC CXX=$CXX cmake ${args[@]} ..
make -j${jobs}
popd
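Note that arguments the script does not recognize fall through to the `cmake` call above, so a standard CMake cache variable can be passed directly; the variable below is only an example:
<pre>
./build.sh --compiler=clang -DCMAKE_BUILD_TYPE=Release
</pre>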

View File

@ -1,27 +0,0 @@
apply plugin: 'groovy'
repositories {
//mavenLocal()
mavenCentral()
maven {
url 'http://nexus.rehlds.org/nexus/content/repositories/rehlds-releases/'
}
maven {
url 'http://nexus.rehlds.org/nexus/content/repositories/rehlds-snapshots/'
}
maven {
url 'http://nexus.rehlds.org/nexus/content/repositories/rehlds-dev/'
}
}
dependencies {
compile gradleApi()
compile localGroovy()
compile 'commons-io:commons-io:2.4'
compile 'commons-lang:commons-lang:2.6'
compile 'joda-time:joda-time:2.7'
compile 'org.doomedsociety.gradlecpp:gradle-cpp-plugin:1.2'
compile 'org.eclipse.jgit:org.eclipse.jgit:3.7.0.201502260915-r'
compile 'org.apache.velocity:velocity:1.7'
}

View File

@ -1,58 +0,0 @@
package dirsync.builder
import dirsync.model.tree.DirectoryNode
import dirsync.model.tree.FileNode
import groovy.transform.CompileStatic
class FileSystemTreeBuilder {
@CompileStatic
private static FileNode<File> buildNodeForFile(File file, DirectoryNode<File> parent) {
if (parent.getChildren(file.name)) {
throw new RuntimeException("Parent dir ${parent.name} already contains child node ${file.name}");
}
return new FileNode(
name: file.name,
lastModifiedDate: file.lastModified(),
data: file,
parent: parent,
size: file.size()
);
}
@CompileStatic
private static DirectoryNode<File> buildNodeForDirectoryRecursive(File dir, DirectoryNode<File> parent) {
if (!dir.isDirectory()) {
throw new RuntimeException("File ${dir.absolutePath} is not a directory")
}
if (parent != null && parent.getChildren(dir.name)) {
throw new RuntimeException("Parent dir ${parent.name} already contains child node ${dir.name}");
}
DirectoryNode<File> thisNode = new DirectoryNode(
name: dir.name,
lastModifiedDate: dir.lastModified(),
data: dir,
parent: parent
);
dir.eachFile { File f ->
if (f.isDirectory()) {
thisNode.childNodes[f.name] = buildNodeForDirectoryRecursive(f, thisNode)
} else {
thisNode.childNodes[f.name] = buildNodeForFile(f, thisNode)
}
}
return thisNode;
}
static DirectoryNode<File> buildFileSystemTree(File rootDir) {
def root = buildNodeForDirectoryRecursive(rootDir, null);
PostBuildPass.doPostBuild(root)
return root
}
}

View File

@ -1,60 +0,0 @@
package dirsync.builder
import dirsync.model.tree.DirectoryNode
import dirsync.model.tree.FileNode
class FileTreeMerger {
private static <T> void mergeContentsRecursive(DirectoryNode<T> newParent, DirectoryNode<T> toMerge) {
toMerge.childNodes.each { cn ->
def node = cn.value
def existingNode = newParent.childNodes[node.name]
if (existingNode) {
if (!(existingNode instanceof DirectoryNode) || !(node instanceof DirectoryNode))
throw new RuntimeException("Failed to merge non-directory nodes ${node.fullPath}")
def existingDirNode = existingNode as DirectoryNode<T>
def dirNode = node as DirectoryNode<T>
existingDirNode.lastModifiedDate = Math.max(existingDirNode.lastModifiedDate, dirNode.lastModifiedDate)
mergeContentsRecursive(existingDirNode, dirNode)
} else {
if (node instanceof DirectoryNode) {
def dirNode = node as DirectoryNode<T>
def newNode = new DirectoryNode<T>(
name: dirNode.name,
data: dirNode.data,
parent: newParent,
lastModifiedDate: dirNode.lastModifiedDate
)
newParent.childNodes[node.name] = newNode
mergeContentsRecursive(newNode, dirNode)
} else {
FileNode<T> fileNode = node as FileNode<T>
FileNode<T> newNode = new FileNode<T>(
name: fileNode.name,
data: fileNode.data,
parent: newParent,
lastModifiedDate: fileNode.lastModifiedDate,
size: fileNode.size
)
newParent.childNodes[node.name] = newNode
}
}
}
}
public static <T> DirectoryNode<T> mergeTrees(DirectoryNode<T> tree1, DirectoryNode<T> tree2) {
DirectoryNode<T> newRoot = new DirectoryNode<T>(
name: tree1.name ?: tree2.name
)
mergeContentsRecursive(newRoot, tree1)
mergeContentsRecursive(newRoot, tree2)
PostBuildPass.doPostBuild(newRoot)
return newRoot
}
}

View File

@ -1,22 +0,0 @@
package dirsync.builder
import dirsync.model.tree.DirectoryNode
class PostBuildPass {
private static <T> void postProcessRecursive(DirectoryNode<T> dir) {
dir.childNodes.each { cne ->
def childNode = cne.value
childNode.fullPath = dir.fullPath ? dir.fullPath + '/' + childNode.name : childNode.name
if (childNode instanceof DirectoryNode) {
def childDirNode = childNode as DirectoryNode<T>
postProcessRecursive(childDirNode)
}
}
}
static <T> void doPostBuild(DirectoryNode<T> root) {
root.fullPath = ''
postProcessRecursive(root)
}
}

View File

@ -1,53 +0,0 @@
package dirsync.builder
import dirsync.model.tree.DirectoryNode
import dirsync.model.tree.FileNode
import dirsync.model.tree.ZipData
import java.util.zip.ZipFile
class ZipTreeBuilder {
static DirectoryNode<ZipData> buildForZipArchive(String zipArchive, ZipFile zf) {
DirectoryNode<ZipData> root = new DirectoryNode<>()
zf.entries().each { ze ->
def path = ze.name.replace('\\', '/')
if (path.endsWith('/'))
path = path.substring(0, path.length() - 1)
def parentPath = path.contains('/') ? path.substring(0, path.lastIndexOf('/')) : ''
def childPath = path.contains('/') ? path.substring(path.lastIndexOf('/') + 1) : path
def parentNode = (DirectoryNode<ZipData>) root.getByPath(parentPath)
if (parentNode == null)
throw new RuntimeException("Error reading ${zipArchive}: could not find parent path ${parentPath} for path ${path}")
def childNode = parentNode.getChildren(childPath)
if (childNode)
throw new RuntimeException("Error reading ${zipArchive}: duplicate path ${path}")
if (ze.directory) {
childNode = new DirectoryNode<ZipData>(
name: childPath,
lastModifiedDate: ze.time,
data: new ZipData(zipEntryName: ze.name, zipArchiveName: zipArchive),
parent: parentNode
);
} else {
childNode = new FileNode<ZipData>(
name: childPath,
lastModifiedDate: ze.time,
data: new ZipData(zipEntryName: ze.name, zipArchiveName: zipArchive),
parent: parentNode,
size: ze.size
);
}
parentNode.childNodes[childPath] = childNode
//println '' + ze.directory + ' ' + ze.name + ' ' + parentPath + ' ' + childPath
}
PostBuildPass.doPostBuild(root)
return root
}
}

View File

@ -1,97 +0,0 @@
package dirsync.merger
import dirsync.model.synccmd.AbstractSyncCmd
import dirsync.model.synccmd.CopyDirCmd
import dirsync.model.synccmd.CopyFileCmd
import dirsync.model.synccmd.DeleteDirCmd
import dirsync.model.synccmd.DeleteFileCmd
import dirsync.model.synccmd.ReplaceFileCmd
import dirsync.model.tree.DirectoryNode
import dirsync.model.tree.FileNode
import groovy.transform.TypeChecked
@TypeChecked
class FileTreeComparator {
private static <T, U> void mergeDirsRecursive(DirectoryNode<T> left, DirectoryNode<U> right, List<AbstractSyncCmd<T, U>> diffs) {
// left => right
left.childNodes.each { le ->
def leftNode = le.value
def rightNode = right.childNodes[leftNode.name]
if (rightNode == null) {
switch (leftNode) {
case DirectoryNode:
def leftDirNode = leftNode as DirectoryNode<T>
diffs << new CopyDirCmd<>(src: leftDirNode, dstParentDir: right)
break
case FileNode:
def leftFileNode = leftNode as FileNode<T>
diffs << new CopyFileCmd<>(src: leftFileNode, dstDir: right)
break
default:
throw new RuntimeException("Invalid node class ${leftNode.class.name}")
}
return
}
if (rightNode.class != leftNode.class) {
throw new RuntimeException("node classes mismatch: ${leftNode.class.name} != ${rightNode.class.name}")
}
switch (rightNode) {
case DirectoryNode:
def leftDirNode = leftNode as DirectoryNode<T>
def rightDirNode = rightNode as DirectoryNode<U>
mergeDirsRecursive(leftDirNode, rightDirNode, diffs)
break
case FileNode:
def leftFileNode = leftNode as FileNode<T>
def rightFileNode = rightNode as FileNode<T>
if (leftFileNode.size != rightFileNode.size || leftFileNode.lastModifiedDate != rightFileNode.lastModifiedDate) {
diffs << new ReplaceFileCmd<>(src: leftFileNode, dst: rightFileNode)
}
break
default:
throw new RuntimeException("Invalid node class ${rightNode.class.name}")
}
} // ~left => right
//right => left
right.childNodes.each { re ->
def rightNode = re.value
def leftNode = left.childNodes[rightNode.name]
if (leftNode != null) {
return //already processed in left => right
}
switch (rightNode) {
case DirectoryNode:
def rightDirNode = rightNode as DirectoryNode<U>
diffs << new DeleteDirCmd<>(dirNode: rightDirNode)
break
case FileNode:
def rightFileNode = rightNode as FileNode<T>
diffs << new DeleteFileCmd<>(node: rightFileNode)
break
default:
throw new RuntimeException("Invalid node class ${rightNode.class.name}")
}
} // ~right => left
}
static <T, U> List<AbstractSyncCmd<T, U>> mergeTrees(DirectoryNode<T> leftRoot, DirectoryNode<U> rightRoot) {
List<AbstractSyncCmd<T, U>> res = []
mergeDirsRecursive(leftRoot, rightRoot, res)
return res
}
}

View File

@ -1,103 +0,0 @@
package dirsync.merger
import dirsync.model.synccmd.AbstractSyncCmd
import dirsync.model.synccmd.CopyDirCmd
import dirsync.model.synccmd.CopyFileCmd
import dirsync.model.synccmd.DeleteDirCmd
import dirsync.model.synccmd.DeleteFileCmd
import dirsync.model.synccmd.ReplaceFileCmd
import dirsync.model.tree.DirectoryNode
import dirsync.model.tree.FileNode
import dirsync.model.tree.TreePhysMapper
import groovy.transform.TypeChecked
import org.apache.commons.io.IOUtils
@TypeChecked
public class FileTreeDiffApplier {
static <T, U> void copyDirRecursive(DirectoryNode<T> src, TreePhysMapper<T> srcMapper, TreePhysMapper<U> dstMapper) {
dstMapper.createDirectory(src.fullPath)
src.childNodes.each { ce ->
def childNode = ce.value
def childPath = childNode.fullPath
switch (childNode) {
case FileNode:
srcMapper.fileContent(childNode.data).withStream { InputStream inStream ->
dstMapper.createFile(childPath).withStream { OutputStream outStream ->
IOUtils.copy(inStream, outStream)
}
dstMapper.setFileLastUpdatedDate(childPath, childNode.lastModifiedDate)
}
break;
case DirectoryNode:
copyDirRecursive(childNode as DirectoryNode<T>, srcMapper, dstMapper)
break;
default:
throw new RuntimeException("Invalid node class: ${childNode.class.name}")
}
}
}
static <T, U> void handleCopyFile(CopyFileCmd<T, U> fileCopy, TreePhysMapper<T> srcMapper, TreePhysMapper<U> dstMapper) {
def dstPath = fileCopy.dstDir.fullPath ? fileCopy.dstDir.fullPath + '/' + fileCopy.src.name : fileCopy.src.name
srcMapper.fileContent(fileCopy.src.data).withStream { InputStream inStream ->
dstMapper.createFile(dstPath).withStream { OutputStream outStream ->
IOUtils.copy(inStream, outStream)
}
dstMapper.setFileLastUpdatedDate(dstPath, fileCopy.src.lastModifiedDate)
}
}
static <T, U> void handleDeleteDir(DeleteDirCmd<T, U> delDir, TreePhysMapper<T> srcMapper, TreePhysMapper<U> dstMapper) {
dstMapper.removeDirectory(delDir.dirNode.fullPath)
}
static <T, U> void handleDeleteFile(DeleteFileCmd<T, U> delFile, TreePhysMapper<T> srcMapper, TreePhysMapper<U> dstMapper) {
dstMapper.removeFile(delFile.node.fullPath)
}
static <T, U> void handleReplaceFile(ReplaceFileCmd<T, U> replaceFile, TreePhysMapper<T> srcMapper, TreePhysMapper<U> dstMapper) {
dstMapper.removeFile(replaceFile.dst.fullPath)
srcMapper.fileContent(replaceFile.src.data).withStream { InputStream inStream ->
dstMapper.createFile(replaceFile.dst.fullPath).withStream { OutputStream outStream ->
IOUtils.copy(inStream, outStream)
}
dstMapper.setFileLastUpdatedDate(replaceFile.dst.fullPath, replaceFile.src.lastModifiedDate)
}
}
static <T, U> void applyDiffs(List<AbstractSyncCmd<T, U>> diffs, TreePhysMapper<T> srcMapper, TreePhysMapper<U> dstMapper) {
diffs.each { diff ->
switch (diff) {
case CopyDirCmd:
def copyDir = diff as CopyDirCmd<T, U>
copyDirRecursive(copyDir.src, srcMapper, dstMapper)
break
case CopyFileCmd:
handleCopyFile(diff as CopyFileCmd<T, U>, srcMapper, dstMapper)
break
case DeleteDirCmd:
handleDeleteDir(diff as DeleteDirCmd<T, U>, srcMapper, dstMapper)
break
case DeleteFileCmd:
handleDeleteFile(diff as DeleteFileCmd<T, U>, srcMapper, dstMapper)
break
case ReplaceFileCmd:
handleReplaceFile(diff as ReplaceFileCmd<T, U>, srcMapper, dstMapper)
break
default:
throw new RuntimeException("Invalid diff command ${diff.class.name}")
}
}
}
}

View File

@ -1,4 +0,0 @@
package dirsync.model.synccmd
class AbstractSyncCmd<T, U> {
}

View File

@ -1,8 +0,0 @@
package dirsync.model.synccmd
import dirsync.model.tree.DirectoryNode
class CopyDirCmd<T, U> extends AbstractSyncCmd<T, U> {
DirectoryNode<T> src
DirectoryNode<U> dstParentDir
}

View File

@ -1,9 +0,0 @@
package dirsync.model.synccmd
import dirsync.model.tree.DirectoryNode
import dirsync.model.tree.FileNode
class CopyFileCmd<T, U> extends AbstractSyncCmd<T, U> {
FileNode<T> src
DirectoryNode<U> dstDir
}

View File

@ -1,7 +0,0 @@
package dirsync.model.synccmd
import dirsync.model.tree.DirectoryNode
class DeleteDirCmd<T, U> extends AbstractSyncCmd<T, U> {
DirectoryNode<U> dirNode
}

View File

@ -1,7 +0,0 @@
package dirsync.model.synccmd
import dirsync.model.tree.FileNode
class DeleteFileCmd<T, U> extends AbstractSyncCmd<T, U> {
FileNode<U> node
}

View File

@ -1,8 +0,0 @@
package dirsync.model.synccmd
import dirsync.model.tree.FileNode
class ReplaceFileCmd<T, U> extends AbstractSyncCmd<T, U> {
FileNode<T> src
FileNode<U> dst
}

View File

@ -1,27 +0,0 @@
package dirsync.model.tree
import groovy.transform.CompileStatic
@CompileStatic
abstract class AbstractFileTreeNode<T> {
DirectoryNode<T> parent
String name
String fullPath
long lastModifiedDate
T data
boolean equals(o) {
if (this.is(o)) return true
if (getClass() != o.class) return false
AbstractFileTreeNode that = (AbstractFileTreeNode) o
if (name != that.name) return false
return true
}
int hashCode() {
return (name != null ? name.hashCode() : 0)
}
}

View File

@ -1,42 +0,0 @@
package dirsync.model.tree
import groovy.transform.CompileStatic
@CompileStatic
class DirectoryNode<T> extends AbstractFileTreeNode<T> {
Map<String, AbstractFileTreeNode<T>> childNodes = new HashMap<>()
AbstractFileTreeNode<T> getChildren(String name) {
return childNodes[name];
}
AbstractFileTreeNode<T> getChildren(String[] names, int idx) {
if (idx == names.length)
return this
AbstractFileTreeNode<T> c = childNodes[names[idx]]
if (c == null)
return null
if (c instanceof DirectoryNode) {
def d = (DirectoryNode<T>) c;
return d.getChildren(names, idx + 1)
}
return null;
}
AbstractFileTreeNode<T> getByPath(String path) {
path = path.replace('\\', '/')
if (path.endsWith('/'))
path = path.substring(0, path.length() - 1)
if (path.empty) {
return this
}
String[] components = path.split('/')
return getChildren(components, 0)
}
}

View File

@ -1,50 +0,0 @@
package dirsync.model.tree
class FSMapper extends TreePhysMapper<File> {
final File root
FSMapper(File root) {
this.root = root
}
@Override
InputStream fileContent(File file) {
return file.newDataInputStream()
}
@Override
void createDirectory(String dir) {
def target = new File(root, dir)
if (!target.mkdirs()) {
throw new RuntimeException("Failed to create directory ${target.absolutePath}")
}
}
@Override
void removeDirectory(String dir) {
def target = new File(root, dir)
if (!target.deleteDir()) {
throw new RuntimeException("Failed to delete directory ${target.absolutePath}")
}
}
@Override
void removeFile(String path) {
def target = new File(root, path)
if (!target.delete()) {
throw new RuntimeException("Failed to delete file ${target.absolutePath}")
}
}
@Override
OutputStream createFile(String path) {
def target = new File(root, path)
return target.newOutputStream()
}
@Override
void setFileLastUpdatedDate(String path, long date) {
def target = new File(root, path)
target.setLastModified(date)
}
}

View File

@ -1,8 +0,0 @@
package dirsync.model.tree
import groovy.transform.CompileStatic
@CompileStatic
class FileNode<T> extends AbstractFileTreeNode<T> {
long size
}

View File

@ -1,11 +0,0 @@
package dirsync.model.tree
abstract class TreePhysMapper<T> {
abstract InputStream fileContent(T file)
abstract void createDirectory(String dir)
abstract void removeDirectory(String dir)
abstract void removeFile(String path)
abstract OutputStream createFile(String path)
abstract void setFileLastUpdatedDate(String path, long date)
}

View File

@ -1,9 +0,0 @@
package dirsync.model.tree
import groovy.transform.CompileStatic
@CompileStatic
class ZipData {
String zipEntryName
String zipArchiveName
}

View File

@ -1,72 +0,0 @@
package dirsync.model.tree
import dirsync.builder.FileTreeMerger
import dirsync.builder.ZipTreeBuilder
import sun.reflect.generics.reflectiveObjects.NotImplementedException
import java.util.zip.ZipFile
public class ZipTreeMapper extends TreePhysMapper<ZipData> implements Closeable {
Map<String, ZipFile> zipArchives = [:]
void addZipArchive(String zipArchive) {
zipArchives[zipArchive] = new ZipFile(zipArchive)
}
DirectoryNode<ZipData> buildFileTree() {
def root = new DirectoryNode<ZipData>()
zipArchives.each { ze ->
def zipTree = ZipTreeBuilder.buildForZipArchive(ze.key, ze.value)
root = FileTreeMerger.mergeTrees(root, zipTree)
}
return root
}
@Override
void close() throws IOException {
zipArchives.each { ze ->
try { ze.value.close() } catch (Exception ignored) { }
}
}
@Override
InputStream fileContent(ZipData file) {
def archive = zipArchives[file.zipArchiveName]
if (!archive) {
throw new RuntimeException("Archive ${file.zipArchiveName} is not loaded");
}
def zipEntry = archive.getEntry(file.zipEntryName)
if (!zipEntry) {
throw new RuntimeException("File ${file.zipEntryName} not found in archive ${file.zipArchiveName}");
}
return archive.getInputStream(zipEntry)
}
@Override
void createDirectory(String dir) {
throw new NotImplementedException()
}
@Override
void removeDirectory(String dir) {
throw new NotImplementedException()
}
@Override
void removeFile(String path) {
throw new NotImplementedException()
}
@Override
OutputStream createFile(String path) {
throw new NotImplementedException()
}
@Override
void setFileLastUpdatedDate(String path, long date) {
throw new NotImplementedException()
}
}

View File

@ -1,19 +0,0 @@
package gradlecpp
import org.gradle.api.Project
import org.gradle.nativeplatform.NativeBinarySpec
class CppUnitTestExtension {
Project _project
CppUnitTestExtension(Project p) {
_project = p
}
void eachTestExecutable(Closure action) {
_project.binaries.each { NativeBinarySpec bin ->
if (!bin.hasProperty('cppUnitTestsExecutable')) return
action(bin)
}
}
}

View File

@ -1,256 +0,0 @@
package gradlecpp
import gradlecpp.teamcity.TeamCityIntegration
import org.gradle.api.Action
import org.gradle.api.GradleException
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.internal.project.AbstractProject
import org.gradle.model.internal.core.DirectNodeModelAction
import org.gradle.model.internal.core.ModelActionRole
import org.gradle.model.internal.core.ModelPath
import org.gradle.model.internal.core.ModelReference
import org.gradle.model.internal.core.MutableModelNode
import org.gradle.model.internal.core.rule.describe.ModelRuleDescriptor
import org.gradle.model.internal.core.rule.describe.SimpleModelRuleDescriptor
import org.gradle.model.internal.registry.ModelRegistry
import org.gradle.nativeplatform.NativeBinarySpec
import org.gradle.nativeplatform.NativeLibrarySpec
import org.gradle.nativeplatform.internal.AbstractNativeBinarySpec
import org.doomedsociety.gradlecpp.GradleCppUtils
class CppUnitTestPlugin implements Plugin<Project> {
private static class TestExecStatus {
boolean successful
boolean warning
int exitCode
String output
long durationMsec
String cmdLine
String execDir
}
static void onBinariesCreated(Project p, String desc, Closure action) {
ModelRegistry mr = (p as AbstractProject).getModelRegistry()
def modelPath = ModelPath.path("binaries")
ModelRuleDescriptor ruleDescriptor = new SimpleModelRuleDescriptor(desc);
mr.configure(ModelActionRole.Finalize, DirectNodeModelAction.of(ModelReference.of(modelPath), ruleDescriptor, new Action<MutableModelNode>() {
@Override
void execute(MutableModelNode node) {
action()
}
}))
}
@Override
void apply(Project project) {
project.extensions.create('cppUnitTest', CppUnitTestExtension, project)
onBinariesCreated(project, 'CppUnitTestPlugin::AttachUnitTest', {
processCppUnitTests(project)
})
}
/**
* Attaches test tasks to C/C++ libraries build tasks
*/
static void processCppUnitTests(Project p) {
//println "processCppUnitTests::afterEvaluate on ${p.name}: project type is ${p.projectType}"
p.binaries.all { NativeBinarySpec bin ->
if (!(bin.component instanceof NativeLibrarySpec)) {
return
}
def testComponentName = bin.component.name + '_tests'
Collection<NativeBinarySpec> testCandidates = p.binaries.matching { it.component.name == testComponentName && bin.buildType == it.buildType && bin.flavor == it.flavor }
if (testCandidates.size() > 1) {
throw new GradleException("Found >1 test candidates for library ${bin.component.name} in project ${p}: ${testCandidates}")
} else if (!testCandidates.empty) {
def testBinary = testCandidates.first()
GradleCppUtils.onTasksCreated(p, 'CppUnitTestPlugin::AttachUnitTestTask', {
attachTestTaskToCppLibrary(bin, testBinary)
})
String testTaskName = bin.namingScheme.getTaskName('unitTest')
bin.ext.cppUnitTestTask = testTaskName
} else {
throw new GradleException("No tests found for library ${bin.component.name} in project ${p}")
}
}
}
static TestExecStatus runTestExecutable(NativeBinarySpec testSubject, String executable, List<String> params, String phase, int timeout) {
def execFile = new File(executable)
def outDir = new File(testSubject.buildTask.project.buildDir, "tests/${testSubject.name}/run")
outDir.mkdirs()
def outPath = new File(outDir, "${phase}.log")
def cmdParams = [];
cmdParams << execFile.absolutePath
cmdParams.addAll(params)
def execDir = execFile.parentFile
def pb = new ProcessBuilder(cmdParams).redirectErrorStream(true).directory(execDir)
if (!GradleCppUtils.windows) {
pb.environment().put('LD_LIBRARY_PATH', '.')
}
def sout = new StringBuffer()
long startTime = System.currentTimeMillis()
def p = pb.start()
p.consumeProcessOutput(sout, sout)
p.waitForOrKill(timeout * 1000)
long endTime = System.currentTimeMillis()
int exitVal = p.exitValue()
outPath.withWriter('UTF-8') { writer ->
writer.write(sout.toString())
}
return new TestExecStatus(
exitCode: exitVal,
successful: (exitVal == 0 || exitVal == 3),
warning: (exitVal == 3),
output: sout.toString(),
durationMsec: endTime - startTime,
cmdLine: cmdParams.join(' '),
execDir: execDir.absolutePath
)
}
static void dumpTestExecStatus(TestExecStatus stat) {
if (!stat) {
println "Execution of test executable failed"
}
println "Test executable command: ${stat.cmdLine}"
println "Test executable run directury: ${stat.execDir}"
println "Test executable exit code: ${stat.exitCode}"
println "Test executable output BEGIN"
println stat.output
println "Test executable output END"
}
static void attachTestTaskToCppLibrary(NativeBinarySpec libBin, NativeBinarySpec testExecBin) {
Project p = libBin.buildTask.project
def libBinImpl = libBin as AbstractNativeBinarySpec
def libLinkTask = GradleCppUtils.getLinkTask(libBin)
def testExecLinkTask = GradleCppUtils.getLinkTask(testExecBin)
// collect all output files from library and test executable
def depFiles = []
depFiles.addAll(libLinkTask.outputs.files.files)
depFiles.addAll(testExecLinkTask.outputs.files.files)
//create 'tests' task
def testTaskName = libBinImpl.namingScheme.getTaskName('unitTest')
def testTask = p.task(testTaskName, { Task testTask ->
//output dir
def testResDir = new File(p.buildDir, "tests/${libBin.name}")
//inputs/outputs for up-to-date check
testTask.outputs.dir testResDir
testTask.inputs.files depFiles
//dependencies on library and test executable
testTask.dependsOn libLinkTask
testTask.dependsOn testExecLinkTask
// binary build depends on unit test
libBin.buildTask.dependsOn testTask
// extra project-specific dependencies
def testDepsTask = p.tasks.findByName('testDeps')
if (testDepsTask != null) {
testTask.dependsOn testDepsTask
}
// task actions
testTask.doLast {
//temporary file that store info about all tests (XML)
File allTests = File.createTempFile('j4s-testinfo', 'data')
allTests.deleteOnExit()
//fill file with test info
print "Fetching test info..."
def getTestsStatus = runTestExecutable(libBin, testExecBin.executableFile.absolutePath, ['-writeTestInfo', allTests.absolutePath], '__getTests', 5000)
if (!getTestsStatus.successful) {
println " Failed"
dumpTestExecStatus(getTestsStatus)
throw new GradleException("Unable to fetch test names")
}
println " OK"
getTestsStatus = null // allow GC to collect it
// parse the test info file
def root = new XmlSlurper().parse(allTests)
// run all tests
println "Running ${root.test.size()} tests..."
TeamCityIntegration.suiteStarted("unitTests.${libBin.name}")
int failCount = 0;
int warnCount = 0;
root.test.list().each { testInfo ->
def testName = '' + testInfo.@name.text()
def testGroup = '' + testInfo.@group.text()
def testTimeout = ('' + testInfo.@timeout.text()) as int
if (!TeamCityIntegration.writeOutput) {
print " ${testGroup}-${testName}..."
System.out.flush()
}
TeamCityIntegration.testStarted("${testGroup}-${testName}")
def testExecStatus = runTestExecutable(libBin, testExecBin.executableFile.absolutePath, ['-runTest', testGroup, testName], "${testGroup}-${testName}", testTimeout)
if (!testExecStatus.successful) {
if (!TeamCityIntegration.writeOutput) {
println " Failed"
}
TeamCityIntegration.testFailed("${testGroup}-${testName}", "test executable return code is ${testExecStatus.exitCode}", "test executable return code is ${testExecStatus.exitCode}")
dumpTestExecStatus(testExecStatus)
failCount++
} else {
if (!TeamCityIntegration.writeOutput) {
if (testExecStatus.warning) {
println " WARNING"
dumpTestExecStatus(testExecStatus)
warnCount++
}
else
println " OK"
}
}
TeamCityIntegration.testStdOut("${testGroup}-${testName}", testExecStatus.output)
TeamCityIntegration.testFinished("${testGroup}-${testName}", testExecStatus.durationMsec)
}
TeamCityIntegration.suiteFinished("unitTests.${libBin.name}")
if (failCount) {
throw new GradleException("CPP unit tests: ${failCount} tests failed");
}
else if (warnCount) {
println "CPP unit tests: ${warnCount} tests warnings";
}
}
})
}
}

View File

@ -1,17 +0,0 @@
package gradlecpp
import org.gradle.api.Plugin
import org.gradle.api.Project
class RehldsPlayTestPlugin implements Plugin<Project> {
@Override
void apply(Project project) {
project.configurations {
rehlds_playtest_image
}
project.dependencies {
rehlds_playtest_image 'rehlds.testimg:testimg:0.2'
}
}
}

View File

@ -1,80 +0,0 @@
package gradlecpp
import gradlecpp.teamcity.TeamCityIntegration
import org.apache.commons.lang.SystemUtils
import org.gradle.api.DefaultTask
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.TaskAction
import org.gradle.nativeplatform.NativeBinarySpec
import rehlds.testdemo.RehldsDemoRunner
import rehlds.testdemo.RehldsTestParser
class RehldsPlayTestTask extends DefaultTask {
def FileCollection testDemos
def Closure postExtractAction
def File rehldsImageRoot
def File rehldsTestLogs
def NativeBinarySpec testFor
@TaskAction
def doPlay() {
if (!SystemUtils.IS_OS_WINDOWS) {
return
}
if (!testDemos) {
println 'RehldsPlayTestTask: no demos attached to the testDemos property'
}
rehldsImageRoot.mkdirs()
rehldsTestLogs.mkdirs()
def demoRunner = new RehldsDemoRunner(this.project.configurations.rehlds_playtest_image.getFiles(), rehldsImageRoot, postExtractAction)
println "Preparing engine..."
demoRunner.prepareEngine()
println "Running ${testDemos.getFiles().size()} ReHLDS test demos..."
TeamCityIntegration.suiteStarted("rehldsDemo.${testFor.name}")
int failCount = 0;
testDemos.getFiles().each { f ->
def testInfo = RehldsTestParser.parseTestInfo(f)
TeamCityIntegration.testStarted(testInfo.testName)
if (!TeamCityIntegration.writeOutput) {
print "Running ReHLDS test demo ${testInfo.testName} "
System.out.flush()
}
def testRes = demoRunner.runTest(testInfo, rehldsTestLogs)
if (testRes.success) {
if (!TeamCityIntegration.writeOutput) {
println ' OK'
}
} else {
TeamCityIntegration.testFailed(testInfo.testName, "Exit code: ${testRes.returnCode}", "Exit code: ${testRes.returnCode}")
if (!TeamCityIntegration.writeOutput) {
println ' Failed'
println "ReHLDS testdemo ${testInfo.testName} playback failed. Exit status is ${testRes.returnCode}."
println "Dumping console output:"
println testRes.hldsConsoleOutput
}
failCount++
}
TeamCityIntegration.testStdOut(testInfo.testName, testRes.hldsConsoleOutput)
TeamCityIntegration.testFinished(testInfo.testName, testRes.duration)
}
TeamCityIntegration.suiteFinished("rehldsDemo.${testFor.name}")
if (failCount) {
throw new RuntimeException("Rehlds testdemos: failed ${failCount} tests")
}
}
}

View File

@ -1,34 +0,0 @@
package gradlecpp
import org.apache.velocity.Template
import org.apache.velocity.VelocityContext
import org.apache.velocity.app.Velocity
class VelocityUtils {
static {
Properties p = new Properties();
p.setProperty("resource.loader", "class");
p.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.FileResourceLoader");
p.setProperty("class.resource.loader.path", "");
p.setProperty("input.encoding", "UTF-8");
p.setProperty("output.encoding", "UTF-8");
Velocity.init(p);
}
static String renderTemplate(File tplFile, Map<String, ? extends Object> ctx) {
Template tpl = Velocity.getTemplate(tplFile.absolutePath)
if (!tpl) {
throw new RuntimeException("Failed to load velocity template ${tplFile.absolutePath}: not found")
}
def velocityContext = new VelocityContext(ctx)
def sw = new StringWriter()
tpl.merge(velocityContext, sw)
return sw.toString()
}
}

View File

@ -1,84 +0,0 @@
package gradlecpp.teamcity
import groovy.transform.CompileStatic
class TeamCityIntegration {
static final String flowId = System.getenv('TEAMCITY_PROCESS_FLOW_ID')
static final boolean underTeamcity = System.getenv('TEAMCITY_PROJECT_NAME')
static boolean writeOutput = underTeamcity
@CompileStatic
private static String escape(String s) {
StringBuilder sb = new StringBuilder((int)(s.length() * 1.2));
for (char c in s.chars) {
switch (c) {
case '\n': sb.append('|n'); break;
case '\r': sb.append('|r'); break;
case '\'': sb.append('|\''); break;
case '|': sb.append('||'); break;
case ']': sb.append('|]'); break;
default: sb.append(c);
}
}
return sb.toString()
}
@CompileStatic
static void writeMessage(String name, Map params) {
if (!writeOutput) return
StringBuilder sb = new StringBuilder()
sb.append('##teamcity[').append(name)
params.each { e ->
if (e.value != null) {
sb.append(' ').append('' + e.key).append('=\'').append(escape('' + e.value)).append('\'')
}
}
sb.append(']')
println sb.toString()
}
static void suiteStarted(String suiteName) {
writeMessage('testSuiteStarted', [name: suiteName, flowId: flowId ?: null])
}
static void suiteFinished(String suiteName) {
writeMessage('testSuiteFinished', [name: suiteName, flowId: flowId ?: null])
}
static void testStarted(String testName) {
writeMessage('testStarted', [name: testName, flowId: flowId ?: null])
}
static void testStdOut(String testName, String output) {
writeMessage('testStdOut', [name: testName, out: output, flowId: flowId ?: null])
}
static void testFinished(String testName, long durationMs) {
writeMessage('testFinished', [
name: testName,
flowId: flowId ?: null,
duration: (durationMs >= 0) ? durationMs : null
])
}
static void testFailed(String testName, String message, String details) {
writeMessage('testFailed', [
name: testName,
flowId: flowId ?: null,
message: message,
details: details
])
}
static void testIgnored(String testName, String message) {
writeMessage('testIgnored', [
name: testName,
flowId: flowId ?: null,
message: message,
])
}
}

View File

@ -1,81 +0,0 @@
package rehlds.testdemo
import dirsync.builder.FileSystemTreeBuilder
import dirsync.merger.FileTreeComparator
import dirsync.merger.FileTreeDiffApplier
import dirsync.model.tree.DirectoryNode
import dirsync.model.tree.FSMapper
import dirsync.model.tree.ZipData
import dirsync.model.tree.ZipTreeMapper
class RehldsDemoRunner {
ZipTreeMapper rehldsImage = new ZipTreeMapper()
File rootDir
DirectoryNode<ZipData> engineImageTree
Closure postExtract
static class TestResult {
boolean success
int returnCode
String hldsConsoleOutput
long duration
}
RehldsDemoRunner(Collection<File> engineImageZips, File rootDir, Closure postExtract) {
this.rootDir = rootDir
engineImageZips.each { f ->
rehldsImage.addZipArchive(f.absolutePath)
}
engineImageTree = rehldsImage.buildFileTree()
this.postExtract = postExtract
}
void prepareEngine() {
def existingTree = FileSystemTreeBuilder.buildFileSystemTree(rootDir)
def cmds = FileTreeComparator.mergeTrees(engineImageTree, existingTree)
FSMapper fsMapper = new FSMapper(rootDir)
FileTreeDiffApplier.applyDiffs(cmds, rehldsImage, fsMapper)
if (postExtract != null) {
postExtract.run()
}
}
TestResult runTest(RehldsTestInfo info, File testLogDir) {
long startTime = System.currentTimeMillis()
prepareEngine()
def outPath = new File(testLogDir, "${info.testName}_run.log")
def cmdParams = []
cmdParams << new File(rootDir, 'hlds.exe').absolutePath
cmdParams.addAll(info.hldsArgs)
if (info.rehldsExtraArgs) {
cmdParams.addAll(info.rehldsExtraArgs)
}
cmdParams << '--rehlds-test-play' << info.testBinFile.absolutePath
def pb = new ProcessBuilder(cmdParams).redirectErrorStream(true).directory(rootDir)
def sout = new StringBuffer()
def p = pb.start()
p.consumeProcessOutput(sout, sout)
p.waitForOrKill(info.timeoutSeconds * 1000)
int exitVal = p.exitValue()
outPath.withWriter('UTF-8') { writer ->
writer.write(sout.toString())
}
long endTime = System.currentTimeMillis()
return new TestResult(
success: (exitVal == 777),
returnCode: exitVal,
hldsConsoleOutput: sout.toString(),
duration: endTime - startTime
)
}
}

View File

@ -1,9 +0,0 @@
package rehlds.testdemo
class RehldsTestInfo {
String testName
List<String> hldsArgs
String rehldsExtraArgs
int timeoutSeconds
File testBinFile
}

View File

@ -1,63 +0,0 @@
package rehlds.testdemo
import groovy.util.slurpersupport.GPathResult
import org.apache.commons.io.IOUtils
import java.util.zip.ZipFile
class RehldsTestParser {
static final String REHLDS_TEST_METAINFO_FILE = 'rehlds_test_metainfo.xml'
static RehldsTestInfo parseTestInfo(File testArchive) {
def zf = new ZipFile(testArchive);
try {
def metaInfoEntry = zf.getEntry(REHLDS_TEST_METAINFO_FILE)
if (metaInfoEntry == null) {
throw new RuntimeException("Unable to open ${REHLDS_TEST_METAINFO_FILE} in ${testArchive.absolutePath}")
}
GPathResult metaInfo = null
zf.getInputStream(metaInfoEntry).withStream { InputStream ins ->
metaInfo = new XmlSlurper().parse(ins)
}
RehldsTestInfo testInfo = new RehldsTestInfo(
testName: metaInfo.name.text(),
hldsArgs: metaInfo.runArgs.arg.list().collect { it.text().trim() },
timeoutSeconds: metaInfo.timeout.text() as int
)
//validate testInfo
if (!testInfo.testName) {
throw new RuntimeException("Error parsing ${testArchive.absolutePath}: test name is not specified")
}
if (!testInfo.hldsArgs) {
throw new RuntimeException("Error parsing ${testArchive.absolutePath}: run arguments are not specified")
}
if (testInfo.timeoutSeconds <= 0) {
throw new RuntimeException("Error parsing ${testArchive.absolutePath}: bad timeout")
}
def testBinName = testInfo.testName + '.bin'
def testBinEntry = zf.getEntry(testBinName)
if (testBinEntry == null) {
throw new RuntimeException("Error parsing ${testArchive.absolutePath}: test binary ${testBinName} not found inside archive")
}
testInfo.testBinFile = File.createTempFile(testBinName, 'rehlds')
testInfo.testBinFile.deleteOnExit()
zf.getInputStream(testBinEntry).withStream { InputStream ins ->
testInfo.testBinFile.withOutputStream { OutputStream os ->
IOUtils.copy(ins, os)
}
}
return testInfo
} finally {
try { zf.close() } catch (Exception ignored) { }
}
}
}

View File

@ -1,16 +0,0 @@
package versioning
import groovy.transform.CompileStatic
import groovy.transform.TypeChecked
import org.joda.time.DateTime
@CompileStatic @TypeChecked
class GitInfo {
boolean localChanges
DateTime commitDate
String branch
String tag
String commitSHA
String commitURL
Integer commitCount
}

View File

@ -1,126 +0,0 @@
package versioning
import java.util.Set;
import groovy.transform.CompileStatic
import groovy.transform.TypeChecked
import org.eclipse.jgit.api.Git
import org.eclipse.jgit.api.Status;
import org.eclipse.jgit.lib.ObjectId
import org.eclipse.jgit.lib.Repository
import org.eclipse.jgit.lib.StoredConfig
import org.eclipse.jgit.revwalk.RevCommit
import org.eclipse.jgit.revwalk.RevWalk
import org.eclipse.jgit.storage.file.FileRepositoryBuilder
import org.joda.time.DateTime
import org.joda.time.DateTimeZone
@CompileStatic @TypeChecked
class GitVersioner {
static GitInfo versionForDir(String dir) {
versionForDir(new File(dir))
}
static int getCountCommit(Repository repo) {
Iterable<RevCommit> commits = Git.wrap(repo).log().call()
int count = 0;
commits.each {
count++;
}
return count;
}
static String prepareUrlToCommits(String url) {
if (url == null) {
// default remote url
return "https://github.com/dreamstalker/rehlds/commit/";
}
StringBuilder sb = new StringBuilder();
String childPath;
int pos = url.indexOf('@');
if (pos != -1) {
childPath = url.substring(pos + 1, url.lastIndexOf('.git')).replace(':', '/');
sb.append('https://');
} else {
pos = url.lastIndexOf('.git');
childPath = (pos == -1) ? url : url.substring(0, pos);
}
// support for different links to history of commits
if (url.indexOf('bitbucket.org') != -1) {
sb.append(childPath).append('/commits/');
} else {
sb.append(childPath).append('/commit/');
}
return sb.toString();
}
// check uncommited changes
static boolean getUncommittedChanges(Repository repo) {
Git git = new Git(repo);
Status status = git.status().call();
Set<String> uncommittedChanges = status.getUncommittedChanges();
for(String uncommitted : uncommittedChanges) {
return true;
}
return false;
}
static GitInfo versionForDir(File dir) {
FileRepositoryBuilder builder = new FileRepositoryBuilder()
Repository repo = builder.setWorkTree(dir)
.findGitDir()
.build()
ObjectId head = repo.resolve('HEAD')
if (!head) {
return null
}
final StoredConfig cfg = repo.getConfig();
def commit = new RevWalk(repo).parseCommit(head)
if (!commit) {
throw new RuntimeException("Can't find last commit.")
}
def localChanges = getUncommittedChanges(repo);
def commitDate = new DateTime(1000L * commit.commitTime, DateTimeZone.UTC);
if (localChanges) {
commitDate = new DateTime();
}
def branch = repo.getBranch()
String url = null;
String remote_name = cfg.getString("branch", branch, "remote");
if (remote_name == null) {
for (String remotes : cfg.getSubsections("remote")) {
if (url != null) {
println 'Found a second remote: (' + remotes + '), url: (' + cfg.getString("remote", remotes, "url") + ')'
continue;
}
url = cfg.getString("remote", remotes, "url");
}
} else {
url = cfg.getString("remote", remote_name, "url");
}
String commitURL = prepareUrlToCommits(url);
String tag = repo.tags.find { kv -> kv.value.objectId == commit.id }?.key
String commitSHA = commit.getId().abbreviate(7).name();
return new GitInfo(
localChanges: localChanges,
commitDate: commitDate,
branch: branch,
tag: tag,
commitSHA: commitSHA,
commitURL: commitURL,
commitCount: getCountCommit(repo)
)
}
}

View File

@ -1,56 +0,0 @@
package versioning
import groovy.transform.CompileStatic
import groovy.transform.ToString
import groovy.transform.TypeChecked
import org.joda.time.format.DateTimeFormat
import org.joda.time.DateTime
@CompileStatic @TypeChecked
@ToString(includeNames = true)
class RehldsVersionInfo {
int majorVersion
int minorVersion
Integer maintenanceVersion
String suffix
boolean localChanges
DateTime commitDate
String commitSHA
String commitURL
Integer commitCount
String asMavenVersion(boolean extra = true) {
StringBuilder sb = new StringBuilder()
sb.append(majorVersion).append('.' + minorVersion);
if (maintenanceVersion != null) {
sb.append('.' + maintenanceVersion);
}
if (commitCount != null) {
sb.append('.' + commitCount)
}
if (extra && suffix) {
sb.append('-' + suffix)
}
// do mark for this build like a modified version
if (extra && localChanges) {
sb.append('+m');
}
return sb.toString()
}
String asCommitDate() {
String pattern = "MMM d yyyy";
if (commitDate.getDayOfMonth() >= 10) {
pattern = "MMM d yyyy";
}
return DateTimeFormat.forPattern(pattern).withLocale(Locale.ENGLISH).print(commitDate);
}
String asCommitTime() {
return DateTimeFormat.forPattern('HH:mm:ss').withLocale(Locale.ENGLISH).print(commitDate);
}
}

View File

@ -1,44 +0,0 @@
package dirsync.builder
import org.junit.Test
import java.io.File
import dirsync.builder.ZipTreeBuilder
import java.util.zip.ZipEntry
import java.util.zip.ZipFile
import java.util.zip.ZipOutputStream;
import static org.junit.Assert.*;
class ZipTreeBuilderTest {
@Test
void test1() {
File zipFile = File.createTempFile('ZipTreeBuilderTest', 'zip')
zipFile.deleteOnExit()
new ZipOutputStream(zipFile.newDataOutputStream()).withStream { ZipOutputStream zos ->
zos.putNextEntry(new ZipEntry('aRootFile1.txt'))
zos.write(65) //'A'
zos.putNextEntry(new ZipEntry('dir1/'))
zos.putNextEntry(new ZipEntry('dir1/dir2/'))
zos.putNextEntry(new ZipEntry('dir1/dir2/d1d2f1.txt'))
zos.write(65); zos.write(66) //'AB'
zos.putNextEntry(new ZipEntry('dir1/d1f1.txt'))
zos.write(65); zos.write(66); zos.write(67) //'ABC'
zos.putNextEntry(new ZipEntry('zRootFile2.txt'))
zos.write(65); zos.write(66); zos.write(67); zos.write(68) //'ABCD'
}
ZipFile zf = new ZipFile(zipFile.absolutePath)
def tree = ZipTreeBuilder.buildForZipArchive(zipFile.absolutePath, zf)
assert tree.childNodes.size() == 3
}
}

View File

@ -1,79 +0,0 @@
import org.doomedsociety.gradlecpp.cfg.ToolchainConfigUtils
import org.doomedsociety.gradlecpp.msvc.MsvcToolchainConfig
import org.doomedsociety.gradlecpp.toolchain.icc.Icc
import org.doomedsociety.gradlecpp.toolchain.icc.IccCompilerPlugin
import org.doomedsociety.gradlecpp.gcc.GccToolchainConfig
import org.gradle.nativeplatform.NativeBinarySpec
import org.gradle.nativeplatform.NativeLibrarySpec
import org.gradle.nativeplatform.toolchain.VisualCpp
apply plugin: 'c'
apply plugin: IccCompilerPlugin
apply plugin: GccCompilerPlugin
void setupToolchain(NativeBinarySpec b) {
def cfg = rootProject.createToolchainConfig(b)
if (cfg instanceof MsvcToolchainConfig) {
cfg.compilerOptions.pchConfig = new MsvcToolchainConfig.PrecompiledHeadersConfig(
enabled: true,
pchHeader: 'bzlib_private.h',
pchSourceSet: 'bz2_pch'
)
}
ToolchainConfigUtils.apply(project, cfg, b)
}
model {
buildTypes {
debug
release
}
platforms {
x86 {
architecture "x86"
}
}
toolChains {
visualCpp(VisualCpp)
if (project.hasProperty("useGcc")) {
gcc(Gcc)
} else {
icc(Icc)
}
}
components {
bzip2(NativeLibrarySpec) {
targetPlatform 'x86'
sources {
bz2_main(CSourceSet) {
source {
srcDir "src"
include "**/*.c"
exclude "precompiled.c"
}
exportedHeaders {
srcDir "include"
}
}
bz2_pch(CSourceSet) {
source {
srcDir "src"
include "precompiled.c"
}
exportedHeaders {
srcDir "include"
}
}
}
binaries.all { NativeBinarySpec b -> project.setupToolchain(b) }
}
}
}

View File

@ -1,64 +0,0 @@
import org.doomedsociety.gradlecpp.cfg.ToolchainConfigUtils
import org.doomedsociety.gradlecpp.msvc.MsvcToolchainConfig
import org.doomedsociety.gradlecpp.toolchain.icc.Icc
import org.doomedsociety.gradlecpp.toolchain.icc.IccCompilerPlugin
import org.doomedsociety.gradlecpp.gcc.GccToolchainConfig
import org.gradle.nativeplatform.NativeBinarySpec
import org.gradle.nativeplatform.NativeLibrarySpec
apply plugin: 'cpp'
apply plugin: IccCompilerPlugin
apply plugin: GccCompilerPlugin
void setupToolchain(NativeBinarySpec b) {
def cfg = rootProject.createToolchainConfig(b)
ToolchainConfigUtils.apply(project, cfg, b)
}
model {
buildTypes {
debug
release
}
platforms {
x86 {
architecture "x86"
}
}
toolChains {
visualCpp(VisualCpp)
if (project.hasProperty("useGcc")) {
gcc(Gcc)
} else {
icc(Icc)
}
}
components {
cppunitlite(NativeLibrarySpec) {
targetPlatform 'x86'
sources {
cppul_main(CppSourceSet) {
source {
srcDir "src"
include "**/*.cpp"
}
exportedHeaders {
srcDir "include"
}
}
}
binaries.all { NativeBinarySpec b ->
project.setupToolchain(b)
}
}
}
}

View File

@ -1,35 +0,0 @@
apply plugin: 'java'
apply plugin: 'groovy'
group = 'org.rehlds.flightrec'
version = rootProject.version
sourceCompatibility = '1.7'
targetCompatibility = '1.7'
repositories {
mavenCentral()
}
dependencies {
testCompile 'org.codehaus.groovy:groovy-all:2.4.5'
testCompile "junit:junit:4.12"
compile project(':flightrec/decoder_api')
}
task uberjar(type: Jar, dependsOn: ['check', ':flightrec/decoder_api:build']) {
from files(sourceSets.main.output.classesDir)
from { configurations.runtime.collect { it.isDirectory() ? it : zipTree(it) } }
exclude('META-INF/*.DSA', 'META-INF/*.RSA', 'META-INF/*.SF', 'META-INF/*.LIST') //exclude all signing stuff
manifest {
attributes 'Main-Class': 'org.rehlds.flightrec.main.FlightRecorder'
attributes 'Implementation-Vendor': 'Sun Microsystems, Inc'
attributes 'Implementation-Title': 'Java Runtime Environment'
attributes 'Implementation-Version': '1.7.0'
}
}
tasks.withType(AbstractCompile) {
options.encoding = 'UTF-8'
}

View File

@ -1,47 +0,0 @@
@if "%DEBUG%" == "" @echo off
setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
set CMD_LINE_ARGS=%*
"%JAVA_EXE%" -jar "%DIRNAME%/decoder.jar" %CMD_LINE_ARGS%
:end
goto mainEnd
:fail
exit /b 1
:mainEnd
endlocal

View File

@ -1,156 +0,0 @@
// Copyright 2011 Google Inc. All rights reserved.
package com.google.cloud;
import java.util.zip.Checksum;
/**
* This class generates a CRC32C checksum, defined by rfc3720 section B.4.
*
*
*/
public final class Crc32c implements Checksum {
private static final long[] CRC_TABLE = {
0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4,
0xc79a971f, 0x35f1141c, 0x26a1e7e8, 0xd4ca64eb,
0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b,
0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24,
0x105ec76f, 0xe235446c, 0xf165b798, 0x030e349b,
0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384,
0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54,
0x5d1d08bf, 0xaf768bbc, 0xbc267848, 0x4e4dfb4b,
0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a,
0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35,
0xaa64d611, 0x580f5512, 0x4b5fa6e6, 0xb93425e5,
0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa,
0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45,
0xf779deae, 0x05125dad, 0x1642ae59, 0xe4292d5a,
0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a,
0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595,
0x417b1dbc, 0xb3109ebf, 0xa0406d4b, 0x522bee48,
0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957,
0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687,
0x0c38d26c, 0xfe53516f, 0xed03a29b, 0x1f682198,
0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927,
0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38,
0xdbfc821c, 0x2997011f, 0x3ac7f2eb, 0xc8ac71e8,
0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7,
0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096,
0xa65c047d, 0x5437877e, 0x4767748a, 0xb50cf789,
0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859,
0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46,
0x7198540d, 0x83f3d70e, 0x90a324fa, 0x62c8a7f9,
0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6,
0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36,
0x3cdb9bdd, 0xceb018de, 0xdde0eb2a, 0x2f8b6829,
0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c,
0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93,
0x082f63b7, 0xfa44e0b4, 0xe9141340, 0x1b7f9043,
0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c,
0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3,
0x55326b08, 0xa759e80b, 0xb4091bff, 0x466298fc,
0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c,
0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033,
0xa24bb5a6, 0x502036a5, 0x4370c551, 0xb11b4652,
0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d,
0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d,
0xef087a76, 0x1d63f975, 0x0e330a81, 0xfc588982,
0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d,
0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622,
0x38cc2a06, 0xcaa7a905, 0xd9f75af1, 0x2b9cd9f2,
0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed,
0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530,
0x0417b1db, 0xf67c32d8, 0xe52cc12c, 0x1747422f,
0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff,
0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0,
0xd3d3e1ab, 0x21b862a8, 0x32e8915c, 0xc083125f,
0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540,
0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90,
0x9e902e7b, 0x6cfbad78, 0x7fab5e8c, 0x8dc0dd8f,
0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee,
0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1,
0x69e9f0d5, 0x9b8273d6, 0x88d28022, 0x7ab90321,
0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e,
0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81,
0x34f4f86a, 0xc69f7b69, 0xd5cf889d, 0x27a40b9e,
0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e,
0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351
};
private static final long LONG_MASK = 0xffffffffL;
private static final long BYTE_MASK = 0xff;
private long crc;
public Crc32c() {
crc = 0;
}
/**
* Updates the checksum with a new byte.
* @param b the new byte.
*/
@Override
public void update(int b) {
long newCrc = crc;
newCrc = updateByte((byte) b, newCrc);
crc = newCrc;
}
/**
* Updates the checksum with an array of bytes.
* @param bArray the array of bytes.
* @param off the offset into the array where the update should begin.
* @param len the length of data to examine.
*/
@Override
public void update(byte[] bArray, int off, int len) {
long newCrc = crc;
for (int i = off; i < off + len; i++) {
newCrc = updateByte(bArray[i], newCrc);
}
crc = newCrc;
}
public void update(byte[] bArray) {
update(bArray, 0, bArray.length);
}
/**
* Returns the value of the checksum.
* @return the long representation of the checksum (high bits set to zero).
*/
@Override
public long getValue() {
return crc;
}
/**
* Returns the value of the checksum.
* @return the 4-byte array representation of the checksum in network byte order (big endian).
*/
public byte[] getValueAsBytes() {
long value = crc;
byte[] result = new byte[4];
for (int i = 3; i >= 0; i--) {
result[i] = (byte) (value & 0xffL);
value >>= 8;
}
return result;
}
/**
* Resets the crc.
*/
@Override
public void reset() {
crc = 0;
}
private long updateByte(byte newByte, long crc) {
byte b = (byte) (newByte & BYTE_MASK);
int index = (int) ((crc ^ b) & BYTE_MASK);
return (CRC_TABLE[index] ^ (crc >> 8)) & LONG_MASK;
}
}

View File

@ -1,14 +0,0 @@
package org.rehlds.flightrec;
public class Consts {
public final static String META_HEADER_SIG_STR = "REHLDS_FLIGHTREC_META";
public final static String DATA_HEADER_SIG_STR = "REHLDS_FLIGHTREC_DATA";
public static byte[] META_HEADER_SIG_BYTES = (META_HEADER_SIG_STR + META_HEADER_SIG_STR + META_HEADER_SIG_STR + ":").getBytes();
public static byte[] DATA_HEADER_SIG_BYTES = (DATA_HEADER_SIG_STR + DATA_HEADER_SIG_STR + DATA_HEADER_SIG_STR + ":").getBytes();
public static int META_HEADER_SIZE = 128;
public static int DATA_HEADER_SIZE = 128;
public static int MAX_HEADER_SIZE = Math.max(META_HEADER_SIZE, DATA_HEADER_SIZE);
}


@ -1,22 +0,0 @@
package org.rehlds.flightrec.decoders.rehlds;
import org.rehlds.flightrec.api.DecodedExtraData;
import org.rehlds.flightrec.api.FlightrecMessage;
import org.rehlds.flightrec.api.FlightrecMessageType;
import org.rehlds.flightrec.api.MessageDecoder;
import org.rehlds.flightrec.api.util.UtilSizeBuf;
public class AllocEntPrivateDataV1Decoder implements MessageDecoder {
@Override
public FlightrecMessageType getMessageType() {
return new FlightrecMessageType("rehlds", "AllocEntPrivateData", 1, false);
}
@Override
public DecodedExtraData decode(FlightrecMessage msg) {
UtilSizeBuf sb = msg.getDataSizebuf();
long ptr = sb.readUInt32();
return DecodedExtraData.create("pPrivData", "0x" + Long.toHexString(ptr));
}
}


@ -1,22 +0,0 @@
package org.rehlds.flightrec.decoders.rehlds;
import org.rehlds.flightrec.api.DecodedExtraData;
import org.rehlds.flightrec.api.FlightrecMessage;
import org.rehlds.flightrec.api.FlightrecMessageType;
import org.rehlds.flightrec.api.MessageDecoder;
import org.rehlds.flightrec.api.util.UtilSizeBuf;
public class AllocEntPrivateDataV2Decoder implements MessageDecoder {
@Override
public FlightrecMessageType getMessageType() {
return new FlightrecMessageType("rehlds", "AllocEntPrivateData", 2, false);
}
@Override
public DecodedExtraData decode(FlightrecMessage msg) {
UtilSizeBuf sb = msg.getDataSizebuf();
long ptr = sb.readUInt32();
long size = sb.readUInt32();
return DecodedExtraData.create("pPrivData", "0x" + Long.toHexString(ptr), "size", "" + size);
}
}


@ -1,30 +0,0 @@
package org.rehlds.flightrec.decoders.rehlds;
import org.rehlds.flightrec.api.DecodedExtraData;
import org.rehlds.flightrec.api.FlightrecMessage;
import org.rehlds.flightrec.api.FlightrecMessageType;
import org.rehlds.flightrec.api.MessageDecoder;
import org.rehlds.flightrec.api.util.UtilSizeBuf;
public class FrameV1Decoder implements MessageDecoder {
@Override
public FlightrecMessageType getMessageType() {
return new FlightrecMessageType("rehlds", "Frame", 1, true);
}
DecodedExtraData decodeStart(UtilSizeBuf sb) {
double startTime = sb.readDouble();
return DecodedExtraData.create("startTime", "" + startTime);
}
DecodedExtraData decodeEnd(UtilSizeBuf sb) {
return DecodedExtraData.EMPTY;
}
@Override
public DecodedExtraData decode(FlightrecMessage msg) {
UtilSizeBuf sb = msg.getDataSizebuf();
return msg.isEnterMessage() ? decodeStart(sb) : decodeEnd(sb);
}
}


@ -1,32 +0,0 @@
package org.rehlds.flightrec.decoders.rehlds;
import org.rehlds.flightrec.api.DecodedExtraData;
import org.rehlds.flightrec.api.FlightrecMessage;
import org.rehlds.flightrec.api.FlightrecMessageType;
import org.rehlds.flightrec.api.MessageDecoder;
import org.rehlds.flightrec.api.util.UtilSizeBuf;
public class FrameV2Decoder implements MessageDecoder {
@Override
public FlightrecMessageType getMessageType() {
return new FlightrecMessageType("rehlds", "Frame", 2, true);
}
DecodedExtraData decodeStart(UtilSizeBuf sb) {
long frameId = sb.readInt64();
double startTime = sb.readDouble();
return DecodedExtraData.create("frameId", "" + frameId, "startTime", "" + startTime);
}
DecodedExtraData decodeEnd(UtilSizeBuf sb) {
long frameId = sb.readInt64();
return DecodedExtraData.create("frameId", "" + frameId);
}
@Override
public DecodedExtraData decode(FlightrecMessage msg) {
UtilSizeBuf sb = msg.getDataSizebuf();
return msg.isEnterMessage() ? decodeStart(sb) : decodeEnd(sb);
}
}


@ -1,21 +0,0 @@
package org.rehlds.flightrec.decoders.rehlds;
import org.rehlds.flightrec.api.DecodedExtraData;
import org.rehlds.flightrec.api.FlightrecMessage;
import org.rehlds.flightrec.api.FlightrecMessageType;
import org.rehlds.flightrec.api.MessageDecoder;
import org.rehlds.flightrec.api.util.UtilSizeBuf;
public class FreeEntPrivateDataV1Decoder implements MessageDecoder {
@Override
public FlightrecMessageType getMessageType() {
return new FlightrecMessageType("rehlds", "FreeEntPrivateData", 1, false);
}
@Override
public DecodedExtraData decode(FlightrecMessage msg) {
UtilSizeBuf sb = msg.getDataSizebuf();
long ptr = sb.readUInt32();
return DecodedExtraData.create("pPrivData", "0x" + Long.toHexString(ptr));
}
}


@ -1,22 +0,0 @@
package org.rehlds.flightrec.decoders.rehlds;
import org.rehlds.flightrec.api.DecodedExtraData;
import org.rehlds.flightrec.api.FlightrecMessage;
import org.rehlds.flightrec.api.FlightrecMessageType;
import org.rehlds.flightrec.api.MessageDecoder;
import org.rehlds.flightrec.api.util.UtilSizeBuf;
public class LogV1Decoder implements MessageDecoder {
@Override
public FlightrecMessageType getMessageType() {
return new FlightrecMessageType("rehlds", "Log", 1, false);
}
@Override
public DecodedExtraData decode(FlightrecMessage msg) {
UtilSizeBuf sb = msg.getDataSizebuf();
String prefix = sb.readString();
String message = sb.readString();
return DecodedExtraData.create("prefix", prefix, "message", message);
}
}


@ -1,18 +0,0 @@
package org.rehlds.flightrec.decoders.rehlds;
import org.rehlds.flightrec.api.SimpleDecoderModule;
public class RehldsDecodersModule extends SimpleDecoderModule {
public RehldsDecodersModule() {
super("Rehlds decoders (built-in)", "0.2");
registerDecoder(new FrameV1Decoder());
registerDecoder(new FreeEntPrivateDataV1Decoder());
registerDecoder(new AllocEntPrivateDataV1Decoder());
registerDecoder(new FrameV2Decoder());
registerDecoder(new LogV1Decoder());
registerDecoder(new AllocEntPrivateDataV2Decoder());
}
}
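The module above registers the built-in decoders; external decoder modules are discovered at runtime via ServiceLoader from jars in the extDecoders directory (see FlightRecorder further down). A hypothetical third-party decoder following the same pattern could look like this — the package, module name and message layout are invented for illustration:
<pre>
package org.example.flightrec; // hypothetical package

import org.rehlds.flightrec.api.DecodedExtraData;
import org.rehlds.flightrec.api.FlightrecMessage;
import org.rehlds.flightrec.api.FlightrecMessageType;
import org.rehlds.flightrec.api.MessageDecoder;
import org.rehlds.flightrec.api.SimpleDecoderModule;
import org.rehlds.flightrec.api.util.UtilSizeBuf;

// Decodes a fictional "MyMessage" v1 record that carries a single uint32 value.
class MyMessageV1Decoder implements MessageDecoder {
    @Override
    public FlightrecMessageType getMessageType() {
        return new FlightrecMessageType("example", "MyMessage", 1, false);
    }

    @Override
    public DecodedExtraData decode(FlightrecMessage msg) {
        UtilSizeBuf sb = msg.getDataSizebuf();
        return DecodedExtraData.create("value", "" + sb.readUInt32());
    }
}

// Packaged in a jar under extDecoders/ and declared as a
// java.util.ServiceLoader provider of DecoderModule.
public class ExampleDecodersModule extends SimpleDecoderModule {
    public ExampleDecodersModule() {
        super("Example decoders (hypothetical)", "0.1");
        registerDecoder(new MyMessageV1Decoder());
    }
}
</pre>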


@ -1,9 +0,0 @@
package org.rehlds.flightrec.filescan;
import java.util.ArrayList;
import java.util.List;
public class FileScanResult {
public List<HeaderScanResult> metaHeaders = new ArrayList<>();
public List<HeaderScanResult> dataHeaders = new ArrayList<>();
}


@ -1,100 +0,0 @@
package org.rehlds.flightrec.filescan;
import com.google.cloud.Crc32c;
import org.rehlds.flightrec.api.util.UtilByteBuffer;
import org.rehlds.flightrec.api.util.UtilSizeBuf;
import static org.rehlds.flightrec.Consts.*;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Arrays;
import java.util.List;
public class FlightRecFileScanner {
RandomAccessFile file;
long fileLen;
FileScanResult scanRes = new FileScanResult();
private byte readBuf[] = new byte[65536];
private byte header[] = new byte[MAX_HEADER_SIZE];
private UtilSizeBuf headerSBuf = new UtilSizeBuf("header", new UtilByteBuffer(header), 0, header.length);
private FlightRecFileScanner(RandomAccessFile file) throws IOException {
this.file = file;
this.fileLen = file.length();
}
private void examineHeader(byte[] data, int size, int pos) throws IOException {
if (pos + MAX_HEADER_SIZE < size) {
System.arraycopy(data, pos, header, 0, MAX_HEADER_SIZE);
} else {
return; //will be read in next iteration
}
headerSBuf.reset();
String matchedType = null;
if (Arrays.equals(META_HEADER_SIG_BYTES, Arrays.copyOfRange(header, 0, META_HEADER_SIG_BYTES.length))) {
matchedType = META_HEADER_SIG_STR;
headerSBuf.skip(META_HEADER_SIG_BYTES.length);
} else if (Arrays.equals(DATA_HEADER_SIG_BYTES, Arrays.copyOfRange(header, 0, DATA_HEADER_SIG_BYTES.length))) {
matchedType = DATA_HEADER_SIG_STR;
headerSBuf.skip(DATA_HEADER_SIG_BYTES.length);
}
if (matchedType == null) {
return;
}
List<HeaderScanResult> resList = (matchedType.equals(META_HEADER_SIG_STR)) ? scanRes.metaHeaders : scanRes.dataHeaders;
int version = headerSBuf.readInt32();
int allocSize = headerSBuf.readInt32();
Crc32c crc32 = new Crc32c();
crc32.update(header, 0, headerSBuf.tell());
long calculatedChecksum = crc32.getValue();
long bufChecksum = headerSBuf.readUInt32();
if (calculatedChecksum != bufChecksum) {
resList.add(new HeaderScanResult(file.getFilePointer() - size + pos, allocSize, false, "Checksum mismatch", version));
return;
}
long endPos = file.getFilePointer() - size + pos + allocSize;
if (endPos > file.length()) {
resList.add(new HeaderScanResult(file.getFilePointer() - size + pos, allocSize, false, "Region partially lies outside the file", version));
return;
}
resList.add(new HeaderScanResult(file.getFilePointer() - size + pos, allocSize, true, null, version));
}
private void scanForHeaders(byte[] data, int size) throws IOException {
int maxHeaderSize = Math.max(META_HEADER_SIG_STR.length(), DATA_HEADER_SIG_STR.length());
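// Quick probe: match 4 bytes in the middle of the candidate signature (offsets
// 15..18) before paying for the full signature comparison and checksum
// validation in examineHeader().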
for (int i = 0; i < size - maxHeaderSize; i++) {
if (data[i + 15] == META_HEADER_SIG_BYTES[15] && data[i + 16] == META_HEADER_SIG_BYTES[16] && data[i + 17] == META_HEADER_SIG_BYTES[17] && data[i + 18] == META_HEADER_SIG_BYTES[18]) {
examineHeader(data, size, i);
} else if (data[i + 15] == DATA_HEADER_SIG_BYTES[15] && data[i + 16] == DATA_HEADER_SIG_BYTES[16] && data[i + 17] == DATA_HEADER_SIG_BYTES[17] && data[i + 18] == DATA_HEADER_SIG_BYTES[18]) {
examineHeader(data, size, i);
}
}
}
private void doScan() throws IOException {
file.seek(0);
int read;
while (-1 != (read = file.read(readBuf))) {
scanForHeaders(readBuf, read);
if (read == readBuf.length) {
file.seek(file.getFilePointer() - MAX_HEADER_SIZE * 2);
}
}
}
public static FileScanResult scan(RandomAccessFile file) throws IOException {
FlightRecFileScanner scanner = new FlightRecFileScanner(file);
scanner.doScan();
return scanner.scanRes;
}
}


@ -1,17 +0,0 @@
package org.rehlds.flightrec.filescan;
public class HeaderScanResult {
public long pos;
public int len;
public boolean valid;
public String error;
public int version;
public HeaderScanResult(long pos, int len, boolean valid, String error, int version) {
this.pos = pos;
this.len = len;
this.valid = valid;
this.error = error;
this.version = version;
}
}


@ -1,12 +0,0 @@
package org.rehlds.flightrec.logparser;
public class DataHeader {
public int prevItrLastPos;
public DataHeader(int prevItrLastPos) {
this.prevItrLastPos = prevItrLastPos;
}
public DataHeader() {
}
}


@ -1,179 +0,0 @@
package org.rehlds.flightrec.logparser;
import org.rehlds.flightrec.api.EntranceKind;
import org.rehlds.flightrec.api.FlightrecMessage;
import org.rehlds.flightrec.api.FlightrecMessageDef;
import org.rehlds.flightrec.api.FlightrecMessageType;
import org.rehlds.flightrec.filescan.HeaderScanResult;
import org.rehlds.flightrec.api.util.UtilByteBuffer;
import org.rehlds.flightrec.api.util.UtilSizeBuf;
import static org.rehlds.flightrec.Consts.*;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.*;
public class FlightLogParser {
UtilByteBuffer metaRegion;
UtilByteBuffer dataRegion;
MetaHeader metaHeader;
RecorderState recorderState;
DataHeader dataHeader;
Map<Integer, FlightrecMessageType> msgTypes = new HashMap<>();
List<FlightrecMessage> messages = new ArrayList<>();
void parseMessageDefinition(UtilSizeBuf sbuf) {
int msgId = sbuf.readUInt16();
String module = sbuf.readString();
String messageName = sbuf.readString();
long msgVersion = sbuf.readUInt32();
boolean inOut = sbuf.readBool();
FlightrecMessageDef msgDef = new FlightrecMessageDef(module, messageName, msgVersion, inOut, msgId);
if (msgTypes.containsKey(msgId)) {
System.out.println("Duplicate message id: " + msgTypes.get(msgId) + " and " + msgDef);
}
msgTypes.put(msgId, msgDef.msgType);
}
void parseMetaRegion() {
metaHeader = new MetaHeader();
UtilSizeBuf metaSBuf = new UtilSizeBuf("meta region", metaRegion);
metaSBuf.skip(META_HEADER_SIG_BYTES.length); //skip signature
metaSBuf.readInt32(); //version
metaSBuf.readInt32(); //allocSize
metaSBuf.readInt32(); //checksum
metaHeader.numDefinitions = metaSBuf.readInt32();
metaHeader.metaRegionPos = metaSBuf.readInt32();
recorderState = new RecorderState();
recorderState.wpos = metaSBuf.readInt32();
recorderState.lastMsgBeginPos = metaSBuf.readInt32();
recorderState.curMessage = metaSBuf.readUInt16();
metaSBuf = new UtilSizeBuf("meta region defs", metaRegion, META_HEADER_SIZE, metaHeader.metaRegionPos);
for (int i = 0; i < metaHeader.numDefinitions; i++) {
int defKind = metaSBuf.readUInt8();
switch (defKind) {
case 1: //MRT_MESSAGE_DEF
parseMessageDefinition(metaSBuf);
break;
default:
throw new RuntimeException("Invalid meta definition type" + defKind);
}
}
dataHeader = new DataHeader();
dataHeader.prevItrLastPos = dataRegion.readInt32(DATA_HEADER_SIG_BYTES.length + 12);
}
public FlightLogParser(UtilByteBuffer metaRegion, UtilByteBuffer dataRegion) {
this.metaRegion = metaRegion;
this.dataRegion = dataRegion;
}
void doParseMessage(UtilSizeBuf msg) {
int opc = msg.readUInt16();
boolean entrance = (0 != (opc & 0x8000));
opc &= 0x7FFF;
FlightrecMessageType msgType = msgTypes.get(opc);
if (msgType == null) {
throw new RuntimeException("Invalid message opcode @" + Long.toHexString(msg.getAbsoluteCurrentPos() - 2) + ": " + opc);
}
EntranceKind entranceKind;
if (msgType.inout) {
entranceKind = entrance ? EntranceKind.ENTRANCE_ENTER : EntranceKind.ENTRANCE_LEAVE;
} else {
entranceKind = EntranceKind.ENTRANCE_UNUSED;
}
FlightrecMessage flMsg = new FlightrecMessage(msgType, entranceKind, msg.getBuffer(), msg.getAbsoluteCurrentPos(), msg.getMaxSize() - 2);
messages.add(flMsg);
}
void parseMessage(UtilSizeBuf msg) {
int startPos = msg.getStartPos();
try {
doParseMessage(msg);
} catch (Exception e) {
e.printStackTrace();
System.out.println("Error while parsing message @" + startPos);
}
}
List<FlightrecMessage> parse() {
parseMetaRegion();
UtilByteBuffer flightData = dataRegion.cutLeft(DATA_HEADER_SIZE);
boolean flippedToEnd = false;
/*
Each message has following layout:
Opcode [2 bytes]
Data [0+ bytes]
Length of opcode + data [2 bytes]
*/
int curMsgPos = (recorderState.curMessage == 0) ? recorderState.wpos : recorderState.lastMsgBeginPos;
curMsgPos -= 2; //position of the Length field of the message
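//The data region is a ring buffer: messages are walked backwards from the
//current write position through their trailing Length fields, and when the
//start of the region is reached the walk resumes from prevItrLastPos (where
//the previous wrap-around ended) until wpos is met again.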
UtilSizeBuf msg = new UtilSizeBuf("flightrec_message", flightData, 0, 0);
while (true) {
if (flippedToEnd && curMsgPos <= recorderState.wpos)
break;
if (curMsgPos <= 0) { //move read pointer to the end of the data region
if (dataHeader.prevItrLastPos == -1) //wpos never reached end of the region
break;
curMsgPos = dataHeader.prevItrLastPos - 2;
flippedToEnd = true;
continue;
}
int msgLen = flightData.readUInt16(curMsgPos);
int msgStartPos = curMsgPos - msgLen;
if (msgStartPos < 0) {
throw new RuntimeException("Corrupted data region; read msgLen=" + msgLen + " at " + curMsgPos + ", but it is too large (startPos < 0)");
}
if (flippedToEnd && msgStartPos < recorderState.wpos) {
break;
}
msg.init(msgStartPos, msgLen);
parseMessage(msg);
curMsgPos = msgStartPos - 2;
}
return messages;
}
public static List<FlightrecMessage> doParse(RandomAccessFile f, HeaderScanResult metaHeader, HeaderScanResult dataHeader) throws IOException {
//read regions to byte buffers
f.seek(metaHeader.pos);
byte[] metaRegionData = new byte[metaHeader.len];
f.readFully(metaRegionData);
f.seek(dataHeader.pos);
byte[] dataRegionData = new byte[dataHeader.len];
f.readFully(dataRegionData);
UtilByteBuffer metaRegion = new UtilByteBuffer(metaRegionData);
UtilByteBuffer dataRegion = new UtilByteBuffer(dataRegionData);
List<FlightrecMessage> res = new FlightLogParser(metaRegion, dataRegion).parse();
Collections.reverse(res);
return res;
}
}


@ -1,12 +0,0 @@
package org.rehlds.flightrec.logparser;
public class LogParsingException extends RuntimeException {
public LogParsingException(String message) {
super(message);
}
public LogParsingException(String message, Throwable cause) {
super(message, cause);
}
}


@ -1,14 +0,0 @@
package org.rehlds.flightrec.logparser;
public class MetaHeader {
public int numDefinitions;
public int metaRegionPos;
public MetaHeader(int numMessages, int metaRegionPos) {
this.numDefinitions = numMessages;
this.metaRegionPos = metaRegionPos;
}
public MetaHeader() {
}
}


@ -1,16 +0,0 @@
package org.rehlds.flightrec.logparser;
public class RecorderState {
public int wpos;
public int lastMsgBeginPos;
public int curMessage;
public RecorderState(int wpos, int lastMsgBeginPos, int curMessage) {
this.wpos = wpos;
this.lastMsgBeginPos = lastMsgBeginPos;
this.curMessage = curMessage;
}
public RecorderState() {
}
}


@ -1,72 +0,0 @@
package org.rehlds.flightrec.logtree;
import org.rehlds.flightrec.api.FlightrecMessage;
import org.rehlds.flightrec.api.FlightrecMessageType;
import java.util.List;
public class FlightLogTreeBuilder {
LogTreeNodeComplex rootNode = new LogTreeNodeComplex(null, null, null);
LogTreeNodeComplex currentNode = rootNode;
void handleEnterMessage(FlightrecMessage msg) {
LogTreeNodeComplex n = new LogTreeNodeComplex(currentNode, msg, null);
currentNode.addChild(n);
currentNode = n;
}
void handleLeaveMessage(FlightrecMessage msg) {
if (currentNode == rootNode) {
currentNode.leaveMsg = msg;
rootNode = new LogTreeNodeComplex(null, null, null);
rootNode.addChild(currentNode);
currentNode.setParent(rootNode);
currentNode = rootNode;
return;
}
if (currentNode.enterMsg != null) {
FlightrecMessageType startType = currentNode.enterMsg.messageType;
FlightrecMessageType endType = msg.messageType;
if (!startType.equals(endType)) {
throw new RuntimeException("Closing message @" + Long.toHexString(msg.rawDataPos) + " has invalid type " + endType + "; expected " + startType);
}
}
currentNode.leaveMsg = msg;
currentNode = currentNode.parent;
}
void handleSimpleMessage(FlightrecMessage msg) {
LogTreeNodeLeaf leafNode = new LogTreeNodeLeaf(currentNode, msg);
currentNode.addChild(leafNode);
}
void doBuildLogTree(List<FlightrecMessage> messages) {
for (FlightrecMessage msg : messages) {
switch (msg.entranceKind) {
case ENTRANCE_ENTER:
handleEnterMessage(msg);
break;
case ENTRANCE_LEAVE:
handleLeaveMessage(msg);
break;
case ENTRANCE_UNUSED:
handleSimpleMessage(msg);
break;
default:
throw new RuntimeException("Invalid exntrance kind");
}
}
}
public static LogTreeNodeComplex buildTree(List<FlightrecMessage> messages) {
FlightLogTreeBuilder builder = new FlightLogTreeBuilder();
builder.doBuildLogTree(messages);
return builder.rootNode;
}
}


@ -1,21 +0,0 @@
package org.rehlds.flightrec.logtree;
import java.util.List;
public abstract class LogTreeNode {
LogTreeNodeComplex parent;
protected LogTreeNode(LogTreeNodeComplex parent) {
this.parent = parent;
}
abstract List<? extends LogTreeNode> getChildren();
LogTreeNodeComplex getParent() {
return parent;
}
public void setParent(LogTreeNodeComplex parent) {
this.parent = parent;
}
}


@ -1,36 +0,0 @@
package org.rehlds.flightrec.logtree;
import org.rehlds.flightrec.api.FlightrecMessage;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class LogTreeNodeComplex extends LogTreeNode {
public FlightrecMessage enterMsg;
public FlightrecMessage leaveMsg;
public LogTreeNodeComplex(LogTreeNodeComplex parent, FlightrecMessage enterMsg, FlightrecMessage leaveMsg) {
super(parent);
this.enterMsg = enterMsg;
this.leaveMsg = leaveMsg;
}
List<LogTreeNode> children = Collections.emptyList();
@Override
public List<LogTreeNode> getChildren() {
return children;
}
public void addChild(LogTreeNode node) {
if (children.isEmpty()) {
children = new ArrayList<>();
}
children.add(node);
}
}


@ -1,20 +0,0 @@
package org.rehlds.flightrec.logtree;
import org.rehlds.flightrec.api.FlightrecMessage;
import java.util.Collections;
import java.util.List;
public class LogTreeNodeLeaf extends LogTreeNode {
public FlightrecMessage msg;
public LogTreeNodeLeaf(LogTreeNodeComplex parent, FlightrecMessage msg) {
super(parent);
this.msg = msg;
}
@Override
List<? extends LogTreeNode> getChildren() {
return Collections.emptyList();
}
}


@ -1,230 +0,0 @@
package org.rehlds.flightrec.main;
import org.rehlds.flightrec.api.DecoderModule;
import org.rehlds.flightrec.api.FlightrecMessage;
import org.rehlds.flightrec.decoders.rehlds.RehldsDecodersModule;
import org.rehlds.flightrec.filescan.FileScanResult;
import org.rehlds.flightrec.filescan.FlightRecFileScanner;
import org.rehlds.flightrec.filescan.HeaderScanResult;
import org.rehlds.flightrec.logtree.FlightLogTreeBuilder;
import org.rehlds.flightrec.logtree.LogTreeNodeComplex;
import org.rehlds.flightrec.logparser.FlightLogParser;
import org.rehlds.flightrec.textlogwriter.TextLogWriter;
import org.rehlds.flightrec.util.JarUtils;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
public class FlightRecorder {
RunConfig cfg;
List<DecoderModule> decoderModules = new ArrayList<>();
public FlightRecorder(RunConfig cfg) {
this.cfg = cfg;
}
private boolean checkConfig() {
if (cfg.dumpFile == null) {
System.out.println("Dump file is not selected, please use --dump-file <filename> parameter to specify it");
return false;
}
if (cfg.outFile == null) {
cfg.outFile = new File(cfg.dumpFile.getAbsolutePath() + ".flog");
}
return true;
}
private List<FlightrecMessage> scanFile(RandomAccessFile f) throws IOException {
FileScanResult scanResult = FlightRecFileScanner.scan(f);
System.out.println("Dump file scan results: ");
for (HeaderScanResult hdr : scanResult.metaHeaders) {
System.out.print(String.format("\tMeta header @ 0x%08X; len=%d; version=%d; valid=%s", hdr.pos, hdr.len, hdr.version, "" + (hdr.error == null)));
if (hdr.error != null) {
System.out.print("; error: " + hdr.error);
}
System.out.println();
}
for (HeaderScanResult hdr : scanResult.dataHeaders) {
System.out.print(String.format("\tData header @ 0x%08X; len=%d; version=%d; valid=%s", hdr.pos, hdr.len, hdr.version, "" + (hdr.error == null)));
if (hdr.error != null) {
System.out.print("; error: " + hdr.error);
}
System.out.println();
}
HeaderScanResult validMetaHeader = null;
HeaderScanResult validDataHeader = null;
for (HeaderScanResult metaHeader : scanResult.metaHeaders) {
if (metaHeader.error != null) {
continue;
}
if (validMetaHeader != null) {
System.out.println("Multiple meta headers found, exiting");
return null;
}
validMetaHeader = metaHeader;
}
for (HeaderScanResult dataHeader : scanResult.dataHeaders) {
if (dataHeader.error != null) {
continue;
}
if (validDataHeader != null) {
System.out.println("Multiple data headers found, exiting");
return null;
}
validDataHeader = dataHeader;
}
if (validMetaHeader == null) {
System.out.println("Meta header not found, exiting");
return null;
}
if (validDataHeader == null) {
System.out.println("Data header not found, exiting");
return null;
}
return FlightLogParser.doParse(f, validMetaHeader, validDataHeader);
}
private LogTreeNodeComplex buildTree(List<FlightrecMessage> messages) {
return FlightLogTreeBuilder.buildTree(messages);
}
private boolean writeOutputFile(LogTreeNodeComplex logTreeRoot) {
TextLogWriter.decodeAndWrite(logTreeRoot, cfg.outFile, decoderModules);
System.out.println("Written decoded log to '" + cfg.outFile.getAbsolutePath() + ";");
return true;
}
public boolean run() {
registerBuiltinDecoders();
loadExternalDecoders();
if (!checkConfig()) {
return false;
}
List<FlightrecMessage> messages;
try(RandomAccessFile f = new RandomAccessFile(cfg.dumpFile, "r")) {
messages = scanFile(f);
} catch (IOException e) {
e.printStackTrace();
return false;
}
if (messages == null) {
return false;
}
System.out.println("Read " + messages.size() + " messages from '" + cfg.dumpFile.getAbsolutePath() + "'");
LogTreeNodeComplex treeRootNode = buildTree(messages);
if (treeRootNode == null) {
return false;
}
if (!writeOutputFile(treeRootNode)) {
return false;
}
return true;
}
private void loadExternalDecoders() {
File f = JarUtils.getJarFileOfClass(FlightRecorder.class);
if (f == null) {
System.out.println("Could not locate main JAR, external decoders will not be loaded");
return;
}
File extDir = new File(f.getParentFile(), "extDecoders");
if (!extDir.exists() || !extDir.isDirectory()) {
System.out.println("Directory '" + extDir.getAbsolutePath() + "' doesn't exist, external decoders will not be loaded");
return; //without this, listFiles() below returns null and the loop would throw a NullPointerException
}
File[] jarFiles = extDir.listFiles(new FileFilter() {
@Override
public boolean accept(File pathname) {
return pathname.getName().toLowerCase().endsWith(".jar");
}
});
ArrayList<URL> jarUrls = new ArrayList<>();
for (File jf : jarFiles) {
try {
jarUrls.add(jf.toURI().toURL());
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}
}
URLClassLoader extDecodersClassloader = new URLClassLoader(jarUrls.toArray(new URL[jarUrls.size()]), this.getClass().getClassLoader());
ServiceLoader<DecoderModule> srvLoader = ServiceLoader.load(DecoderModule.class, extDecodersClassloader);
for (DecoderModule decoderModule : srvLoader) {
System.out.println("Loaded external decoder module " + decoderModule.getDescription() + " version " + decoderModule.getVersion());
decoderModules.add(decoderModule);
}
}
private void registerBuiltinDecoders() {
decoderModules.add(new RehldsDecodersModule());
}
public static void main(String args[]) {
RunConfig cfg;
try {
cfg = parseArgs(args);
} catch (IllegalArgumentException e) {
System.out.println(e.getMessage());
return;
}
new FlightRecorder(cfg).run();
}
private static RunConfig parseArgs(String args[]) {
RunConfig cfg = new RunConfig();
for (int i = 0; i < args.length; i++) {
String a = args[i];
if ("--dump-file".equals(a)) {
if (i + 1 >= args.length) {
throw new IllegalArgumentException("--dump-file should be followed by file name");
}
i++;
cfg.dumpFile = new File(args[i]);
continue;
}
if ("--out-file".equals(a)) {
if (i + 1 >= args.length) {
throw new IllegalArgumentException("--out-file should be followed by file name");
}
i++;
cfg.outFile = new File(args[i]);
continue;
}
throw new IllegalArgumentException("Invalid command line parameter: '" + a + "'");
}
return cfg;
}
}
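A minimal sketch of driving the decoder programmatically (illustrative only; the dump file name is made up). From the command line the equivalent is `--dump-file` plus an optional `--out-file`; the output path defaults to the dump path with a `.flog` suffix, as checkConfig() above shows:
<pre>
import java.io.File;

import org.rehlds.flightrec.main.FlightRecorder;
import org.rehlds.flightrec.main.RunConfig;

public class DecodeDump {
    public static void main(String[] args) {
        RunConfig cfg = new RunConfig();
        cfg.dumpFile = new File("server.dmp"); // hypothetical dump file
        // cfg.outFile left null: checkConfig() defaults it to server.dmp.flog
        boolean ok = new FlightRecorder(cfg).run();
        System.exit(ok ? 0 : 1);
    }
}
</pre>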


@ -1,8 +0,0 @@
package org.rehlds.flightrec.main;
import java.io.File;
public class RunConfig {
public File dumpFile;
public File outFile;
}


@ -1,185 +0,0 @@
package org.rehlds.flightrec.textlogwriter;
import org.rehlds.flightrec.api.*;
import org.rehlds.flightrec.logtree.LogTreeNode;
import org.rehlds.flightrec.logtree.LogTreeNodeComplex;
import org.rehlds.flightrec.logtree.LogTreeNodeLeaf;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class TextLogWriter {
Writer writer;
List<DecoderModule> decoderModules;
int indent;
HashMap<Integer, String> indents = new HashMap<>();
Map<FlightrecMessageType, MessageDecoder> decodersByMsgType = new HashMap<>();
public TextLogWriter(Writer writer, List<DecoderModule> decoderModules) {
this.writer = writer;
this.decoderModules = decoderModules;
}
MessageDecoder lookupDecoder(FlightrecMessageType msgType) {
for (DecoderModule dm : decoderModules) {
MessageDecoder d = dm.lookupDecoder(msgType);
if (d != null) {
return d;
}
}
return null;
}
MessageDecoder getDecoder(FlightrecMessage message) {
FlightrecMessageType msgType = message.messageType;
if (!decodersByMsgType.containsKey(msgType)) {
decodersByMsgType.put(msgType, lookupDecoder(msgType));
}
return decodersByMsgType.get(msgType);
}
DecodedExtraData tryDecode(FlightrecMessage message) {
MessageDecoder decoder = getDecoder(message);
if (decoder == null) {
return null;
}
return decoder.decode(message);
}
String escapeString(String s) {
return s.replace("\"", "\\\"")
.replace("'", "\\'")
.replace("\n", "\\n")
.replace("\r", "\\r");
}
String generateIndent() {
String res = indents.get(indent);
if (res != null) {
return res;
}
StringBuilder sb = new StringBuilder();
for (int i = 0; i < indent; i++) {
sb.append(" ");
}
res = sb.toString();
indents.put(indent, res);
return res;
}
void writeExtraData(StringBuilder sb, DecodedExtraData extraData) {
boolean first = true;
for (ImmutablePair<String, String> kv : extraData.data) {
if (first) {
first = false;
} else {
sb.append(", ");
}
sb.append(kv.first).append(": '").append(escapeString(kv.second)).append("'");
}
}
String prepareMessageText(FlightrecMessage msg) {
StringBuilder sb = new StringBuilder();
sb.append(generateIndent());
switch (msg.entranceKind) {
case ENTRANCE_ENTER:
sb.append(">>");
break;
case ENTRANCE_LEAVE:
sb.append("<<");
break;
case ENTRANCE_UNUSED:
sb.append("--");
break;
default:
throw new RuntimeException("Invalid entrance kind " + msg.entranceKind);
}
FlightrecMessageType msgType = msg.messageType;
sb.append(" ").append(msgType.module).append(".").append(msgType.message).append(":").append(msgType.version).append(" ");
DecodedExtraData extraData = tryDecode(msg);
if (extraData != null) {
writeExtraData(sb, extraData);
} else {
sb.append("undecoded[");
boolean firstByte = true;
for (int i = msg.rawDataPos; i < msg.rawDataLen + msg.rawDataPos; i++) {
if (firstByte) {
firstByte = false;
} else {
sb.append(" ");
}
sb.append(String.format("%02X", msg.rawData[i] & 0xFF));
}
sb.append("]");
}
sb.append("\n");
return sb.toString();
}
void writeMessage(FlightrecMessage msg) throws IOException {
String text = prepareMessageText(msg);
writer.write(text);
}
void writeLeafNode(LogTreeNodeLeaf node) throws IOException {
writeMessage(node.msg);
}
void writeComplexNode(LogTreeNodeComplex node) throws IOException {
if (node.enterMsg != null) {
writeMessage(node.enterMsg);
} else {
writer.write(generateIndent() + ">> [Unknown]\n");
}
indent++;
writeNodes(node.getChildren());
indent--;
if (node.leaveMsg != null) {
writeMessage(node.leaveMsg);
} else {
writer.write(generateIndent() + "<< [Unknown]\n");
}
}
void writeNodes(List<LogTreeNode> nodes) throws IOException {
for (LogTreeNode node : nodes) {
if (node instanceof LogTreeNodeComplex) {
writeComplexNode((LogTreeNodeComplex) node);
} else if (node instanceof LogTreeNodeLeaf) {
writeLeafNode((LogTreeNodeLeaf) node);
} else {
throw new RuntimeException("Invalid node class " + node.getClass().getName());
}
}
}
public static void decodeAndWrite(LogTreeNodeComplex rootNode, File outFile, List<DecoderModule> decoderModules) {
try (FileWriter fw = new FileWriter(outFile)) {
TextLogWriter logWriter = new TextLogWriter(fw, decoderModules);
logWriter.writeNodes(rootNode.getChildren());
} catch (IOException e) {
throw new RuntimeException("Failed to open/write file '" + outFile + "': " + e.getMessage(), e);
}
}
}
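For orientation, prepareMessageText() above emits one line per message: an indent, a >> / << / -- marker for enter, leave and flat messages, the module.message:version triple, then the decoded key/value pairs (or an undecoded[...] hex dump when no decoder is registered). The lines below are invented purely to show the shape of the output:
<pre>
>> rehlds.Frame:2 frameId: '1024', startTime: '12.5'
 -- rehlds.Log:1 prefix: 'SV', message: 'spawning entities'
<< rehlds.Frame:2 frameId: '1024'
</pre>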


@ -1,31 +0,0 @@
package org.rehlds.flightrec.util;
import java.io.File;
import java.net.URL;
public class JarUtils {
public static File getJarFileOfClass(Class c) {
String classFileName = c.getName().replace('.', '/') + ".class";
ClassLoader classLoader = c.getClassLoader();
if (classLoader == null) {
classLoader = JarUtils.class.getClassLoader();
}
URL url = classLoader.getResource(classFileName);
if (url == null) {
return null;
}
String strUrl = url.toString();
if (!strUrl.startsWith("jar:file:/")) {
return null;
}
int jarSeparator = strUrl.indexOf('!');
if (jarSeparator == -1) {
return null;
}
String jarFilePath = strUrl.substring("jar:file:/".length(), jarSeparator);
return new File(jarFilePath);
}
}


@ -1,181 +0,0 @@
package org.rehlds.flightrec.logtree
import org.junit.Test
import org.rehlds.flightrec.api.EntranceKind
import org.rehlds.flightrec.api.FlightrecMessage
import org.rehlds.flightrec.api.FlightrecMessageType
class FlightLogTreeBuilderTest {
static final FlightrecMessageType hierarchyMsgType1 = new FlightrecMessageType('test', 'hmsg1', 1, true);
static final FlightrecMessageType hierarchyMsgType2 = new FlightrecMessageType('test', 'hmsg1', 1, true);
static final FlightrecMessageType flatMsgType1 = new FlightrecMessageType('test', 'flatmsg1', 1, true);
static final FlightrecMessageType flatMsgType2 = new FlightrecMessageType('test', 'flatmsg2', 1, true);
static FlightrecMessage enterMsg(FlightrecMessageType type) {
return new FlightrecMessage(type, EntranceKind.ENTRANCE_ENTER, null, 0, 0);
}
static FlightrecMessage leaveMsg(FlightrecMessageType type) {
return new FlightrecMessage(type, EntranceKind.ENTRANCE_LEAVE, null, 0, 0);
}
static FlightrecMessage flatMsg(FlightrecMessageType type) {
return new FlightrecMessage(type, EntranceKind.ENTRANCE_UNUSED, null, 0, 0);
}
@Test
void 'decode 2 flat messages'() {
def messages = [flatMsg(flatMsgType1), flatMsg(flatMsgType2)]
def rootNode = FlightLogTreeBuilder.buildTree(messages)
assert rootNode.children.size() == 2
assert rootNode.children[0].msg == messages[0];
assert rootNode.children[1].msg == messages[1];
assert rootNode.enterMsg == null
assert rootNode.leaveMsg == null
}
@Test
void 'decode 2 empty hierarchy msgs'() {
def messages = [
enterMsg(hierarchyMsgType1), leaveMsg(hierarchyMsgType1),
enterMsg(hierarchyMsgType2), leaveMsg(hierarchyMsgType2)
]
def rootNode = FlightLogTreeBuilder.buildTree(messages)
assert rootNode.children.size() == 2
assert rootNode.children[0].enterMsg == messages[0]
assert rootNode.children[0].leaveMsg == messages[1]
assert rootNode.children[0].children.empty
assert rootNode.children[1].enterMsg == messages[2]
assert rootNode.children[1].leaveMsg == messages[3]
assert rootNode.children[1].children.empty
assert rootNode.enterMsg == null
assert rootNode.leaveMsg == null
}
@Test
void 'decode 2 hierarchy messages with flat payload'() {
def messages = [
enterMsg(hierarchyMsgType1), flatMsg(flatMsgType1), leaveMsg(hierarchyMsgType1),
enterMsg(hierarchyMsgType2), flatMsg(flatMsgType2), leaveMsg(hierarchyMsgType2)
]
def rootNode = FlightLogTreeBuilder.buildTree(messages)
assert rootNode.children.size() == 2
assert rootNode.children[0].enterMsg == messages[0]
assert rootNode.children[0].leaveMsg == messages[2]
assert rootNode.children[0].children.size() == 1
assert rootNode.children[0].children[0].msg == messages[1]
assert rootNode.children[1].enterMsg == messages[3]
assert rootNode.children[1].leaveMsg == messages[5]
assert rootNode.children[1].children.size() == 1
assert rootNode.children[1].children[0].msg == messages[4]
assert rootNode.enterMsg == null
assert rootNode.leaveMsg == null
}
@Test
void 'decode hierarchical message with mixed payload'() {
def messages = [
flatMsg(flatMsgType2),
enterMsg(hierarchyMsgType1),
flatMsg(flatMsgType1),
enterMsg(hierarchyMsgType2),
flatMsg(flatMsgType2),
leaveMsg(hierarchyMsgType2),
flatMsg(flatMsgType2),
leaveMsg(hierarchyMsgType1),
flatMsg(flatMsgType1)
]
def rootNode = FlightLogTreeBuilder.buildTree(messages)
assert rootNode.children.size() == 3
assert rootNode.enterMsg == null
assert rootNode.leaveMsg == null
assert rootNode.children[0].msg == messages[0]
assert rootNode.children[2].msg == messages[8]
assert rootNode.children[1].enterMsg == messages[1]
assert rootNode.children[1].leaveMsg == messages[7]
assert rootNode.children[1].children.size() == 3
assert rootNode.children[1].children[0].msg == messages[2]
assert rootNode.children[1].children[2].msg == messages[6]
assert rootNode.children[1].children[1].enterMsg == messages[3]
assert rootNode.children[1].children[1].leaveMsg == messages[5]
assert rootNode.children[1].children[1].children.size() == 1
assert rootNode.children[1].children[1].children[0].msg == messages[4]
}
@Test
void 'decode hierarchical msg with flat payload and missing start'() {
def messages = [
flatMsg(flatMsgType1),
leaveMsg(hierarchyMsgType1),
flatMsg(flatMsgType2)
]
def rootNode = FlightLogTreeBuilder.buildTree(messages)
assert rootNode.children.size() == 2
assert rootNode.enterMsg == null
assert rootNode.leaveMsg == null
assert rootNode.children[0].enterMsg == null
assert rootNode.children[0].leaveMsg == messages[1]
assert rootNode.children[0].children.size() == 1
assert rootNode.children[0].children[0].msg == messages[0]
assert rootNode.children[1].msg == messages[2]
}
@Test
void 'decode empty hierarchical msg with missing start'() {
def messages = [
leaveMsg(hierarchyMsgType1),
flatMsg(flatMsgType2)
]
def rootNode = FlightLogTreeBuilder.buildTree(messages)
assert rootNode.children.size() == 2
assert rootNode.enterMsg == null
assert rootNode.leaveMsg == null
assert rootNode.children[0].enterMsg == null
assert rootNode.children[0].leaveMsg == messages[0]
assert rootNode.children[0].children.empty
assert rootNode.children[1].msg == messages[1]
}
@Test
void 'decode hierarchical msg with flat payload and missing end'() {
def messages = [
flatMsg(flatMsgType1),
enterMsg(hierarchyMsgType1),
flatMsg(flatMsgType2)
]
def rootNode = FlightLogTreeBuilder.buildTree(messages)
assert rootNode.children.size() == 2
assert rootNode.enterMsg == null
assert rootNode.leaveMsg == null
assert rootNode.children[0].msg == messages[0]
assert rootNode.children[1].enterMsg == messages[1]
assert rootNode.children[1].leaveMsg == null
assert rootNode.children[1].children.size() == 1
assert rootNode.children[1].children[0].msg == messages[2]
}
}


@ -1,43 +0,0 @@
package com.google.cloud;
import org.junit.Test;
import static org.junit.Assert.*;
public class Crc32cTest {
static class TestData {
public String src;
public long hash;
TestData(String src, long hash) {
this.src = src;
this.hash = hash;
}
}
@Test
public void testCrc32c() {
TestData testData[] = {
new TestData("a", 0x93AD1061L),
new TestData("ab", 0x13C35EE4L),
new TestData("abc", 0x562F9CCDL),
new TestData("abcd", 0xDAAF41F6L),
new TestData("abcde", 0x8122A0A2L),
new TestData("abcdef", 0x0496937BL),
new TestData("abcdefg", 0x5D199E2CL),
new TestData("abcdefgh", 0x86BC933DL),
new TestData("abcdefghi", 0x9639F15FL),
new TestData("abcdefghij", 0x0584645CL),
};
for (TestData t : testData) {
Crc32c crc32c = new Crc32c();
crc32c.update(t.src.getBytes());
long cksum = crc32c.getValue();
assertEquals(t.hash, cksum);
}
}
}


@ -1,74 +0,0 @@
apply plugin: 'java'
apply plugin: 'maven-publish'
group = 'org.rehlds.flightrec'
version = rootProject.version
sourceCompatibility = '1.7'
targetCompatibility = '1.7'
repositories {
mavenCentral()
}
dependencies {
testCompile "junit:junit:4.12"
}
publishing {
publications {
maven(MavenPublication) {
version project.version
artifactId 'decoder-api'
artifact jar
pom.withXml {
asNode().children().last() + {
resolveStrategy = DELEGATE_FIRST
name 'decoder-api'
description project.description
//url github
//scm {
// url "${github}.git"
// connection "scm:git:${github}.git"
//}
/*
licenses {
license {
name 'The Apache Software License, Version 2.0'
url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
distribution 'repo'
}
}
developers {
developer {
id 'dreamstalker'
name 'dreamstalker'
}
}
*/
}
}
}
}
}
publishing {
repositories {
maven {
if (project.version.contains('dev')) {
url "http://nexus.rehlds.org/nexus/content/repositories/rehlds-dev/"
} else {
url "http://nexus.rehlds.org/nexus/content/repositories/rehlds-releases/"
}
credentials {
username rootProject.repoCreds.getProperty('username')
password rootProject.repoCreds.getProperty('password')
}
}
}
}
tasks.withType(AbstractCompile) {
options.encoding = 'UTF-8'
}


@ -1,28 +0,0 @@
package org.rehlds.flightrec.api;
public class DecodedExtraData {
public ImmutablePair<String, String>[] data;
public DecodedExtraData(ImmutablePair<String, String>[] data) {
this.data = data;
}
public static final DecodedExtraData EMPTY = new DecodedExtraData(new ImmutablePair[0]);
public static DecodedExtraData create(String... args) {
if ((args.length % 2) == 1) {
throw new RuntimeException("DecodedExtraData.create: number of arguments must be even");
}
int numPairs = args.length / 2;
DecodedExtraData res = new DecodedExtraData(new ImmutablePair[numPairs]);
for (int i = 0; i < numPairs; i++) {
res.data[i] = new ImmutablePair<>(args[i * 2], args[i * 2 + 1]);
}
return res;
}
}


@ -1,7 +0,0 @@
package org.rehlds.flightrec.api;
public interface DecoderModule {
public MessageDecoder lookupDecoder(FlightrecMessageType msgType);
public String getDescription();
public String getVersion();
}


@ -1,7 +0,0 @@
package org.rehlds.flightrec.api;
public enum EntranceKind {
ENTRANCE_ENTER,
ENTRANCE_LEAVE,
ENTRANCE_UNUSED,
}


@ -1,35 +0,0 @@
package org.rehlds.flightrec.api;
import org.rehlds.flightrec.api.util.UtilByteBuffer;
import org.rehlds.flightrec.api.util.UtilSizeBuf;
public class FlightrecMessage {
public final FlightrecMessageType messageType;
public final EntranceKind entranceKind;
public final byte[] rawData;
public final int rawDataPos;
public final int rawDataLen;
DecodedExtraData decodedData;
public FlightrecMessage(FlightrecMessageType messageType, EntranceKind entranceKind, byte[] rawData, int rawDataOffset, int rawDataLen) {
this.messageType = messageType;
this.entranceKind = entranceKind;
this.rawData = rawData;
this.rawDataPos = rawDataOffset;
this.rawDataLen = rawDataLen;
}
public UtilSizeBuf getDataSizebuf() {
return new UtilSizeBuf("msg: '" + messageType + "' @" + rawDataPos, new UtilByteBuffer(rawData), rawDataPos, rawDataLen);
}
public boolean isEnterMessage() {
return (entranceKind == EntranceKind.ENTRANCE_ENTER);
}
public boolean isLeaveMessage() {
return (entranceKind == EntranceKind.ENTRANCE_LEAVE);
}
}


@ -1,22 +0,0 @@
package org.rehlds.flightrec.api;
public class FlightrecMessageDef {
public final FlightrecMessageType msgType;
public final int opcode;
public FlightrecMessageDef(String module, String message, long version, boolean inout, int opcode) {
msgType = new FlightrecMessageType(module, message, version, inout);
this.opcode = opcode;
}
@Override
public String toString() {
return "FlightrecMessageDef{" +
"module='" + msgType.module + '\'' +
", message='" + msgType.message + '\'' +
", version=" + msgType.version +
", inout=" + msgType.inout +
", opcode=" + opcode +
'}';
}
}


@ -1,49 +0,0 @@
package org.rehlds.flightrec.api;
public class FlightrecMessageType {
public final String module;
public final String message;
public final long version;
public final boolean inout;
public FlightrecMessageType(String module, String message, long version, boolean inout) {
this.module = module;
this.message = message;
this.version = version;
this.inout = inout;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FlightrecMessageType that = (FlightrecMessageType) o;
if (inout != that.inout) return false;
if (version != that.version) return false;
if (!message.equals(that.message)) return false;
if (!module.equals(that.module)) return false;
return true;
}
@Override
public int hashCode() {
int result = module.hashCode();
result = 31 * result + message.hashCode();
result = 31 * result + (int) (version ^ (version >>> 32));
result = 31 * result + (inout ? 1 : 0);
return result;
}
@Override
public String toString() {
return "FlightrecMessageType{" +
"module='" + module + '\'' +
", message='" + message + '\'' +
", version=" + version +
", inout=" + inout +
'}';
}
}


@ -1,11 +0,0 @@
package org.rehlds.flightrec.api;
public class ImmutablePair<T, U> {
public final T first;
public final U second;
public ImmutablePair(T first, U second) {
this.first = first;
this.second = second;
}
}


@ -1,6 +0,0 @@
package org.rehlds.flightrec.api;
public interface MessageDecoder {
FlightrecMessageType getMessageType();
DecodedExtraData decode(FlightrecMessage msg);
}


@ -1,35 +0,0 @@
package org.rehlds.flightrec.api;
import java.util.HashMap;
import java.util.Map;
public class SimpleDecoderModule implements DecoderModule {
Map<FlightrecMessageType, MessageDecoder> decoders = new HashMap<>();
public final String description;
public final String version;
public SimpleDecoderModule(String description, String version) {
this.description = description;
this.version = version;
}
@Override
public MessageDecoder lookupDecoder(FlightrecMessageType msgType) {
return decoders.get(msgType);
}
@Override
public String getDescription() {
return description;
}
@Override
public String getVersion() {
return version;
}
public void registerDecoder(MessageDecoder msgDecoder) {
decoders.put(msgDecoder.getMessageType(), msgDecoder);
}
}


@ -1,8 +0,0 @@
package org.rehlds.flightrec.api.util;
import java.nio.charset.Charset;
public class Globals {
public static final Charset UTF8 = Charset.forName("UTF-8");
}


@ -1,10 +0,0 @@
package org.rehlds.flightrec.api.util;
public class SizebufOverflowException extends RuntimeException {
public final String sizebufName;
public SizebufOverflowException(String sizebufName) {
super(sizebufName + " overflowed");
this.sizebufName = sizebufName;
}
}


@ -1,79 +0,0 @@
package org.rehlds.flightrec.api.util;
public class UtilByteBuffer {
byte data[];
public UtilByteBuffer(byte[] data) {
this.data = data;
}
public byte[] getData() {
return data;
}
public UtilByteBuffer cutLeft(int newStart) {
byte[] newData = new byte[data.length - newStart];
System.arraycopy(data, newStart, newData, 0, data.length - newStart);
return new UtilByteBuffer(newData);
}
public int getDataLength() {
return data.length;
}
public int readUInt8(int pos) {
return data[pos] & 0xFF;
}
public boolean readBool(int pos) {
return data[pos] != 0;
}
public int readUInt16(int pos) {
return (data[pos] & 0xFF) | ((data[pos + 1] & 0xFF) << 8);
}
public long readUInt32(int pos) {
return (data[pos] & 0xFF) | ((data[pos + 1] & 0xFF) << 8) | ((data[pos + 2] & 0xFF) << 16) | ((long)(data[pos + 3] & 0xFF) << 24);
}
public int readInt32(int pos) {
return (data[pos] & 0xFF) | ((data[pos + 1] & 0xFF) << 8) | ((data[pos + 2] & 0xFF) << 16) | ((data[pos + 3] & 0xFF) << 24);
}
public long readInt64(int pos) {
long lowBits = readUInt32(pos);
long highBits = readUInt32(pos + 4);
return lowBits | (highBits << 32);
}
public double readDouble(int pos) {
long bits = readInt64(pos);
return Double.longBitsToDouble(bits);
}
public float readFloat(int pos) {
int bits = readInt32(pos);
return Float.intBitsToFloat(bits);
}
public String readString(int pos) {
return readString(pos, data.length - pos, true);
}
public String readString(int pos, int maxSize, boolean errorOnMaxSizeHit) {
int iMax = Math.min(data.length, pos + maxSize);
for (int i = pos; i < iMax; i++) {
if (data[i] == 0) {
return new String(data, pos, i - pos, Globals.UTF8);
}
}
if (errorOnMaxSizeHit) {
return null;
}
return new String(data, pos, iMax - pos, Globals.UTF8);
}
}


@ -1,159 +0,0 @@
package org.rehlds.flightrec.api.util;
public class UtilSizeBuf {
String name;
UtilByteBuffer buf;
int startPos;
int maxSize;
int curPos;
public UtilSizeBuf(String name, UtilByteBuffer buf, int startPos, int maxSize) {
this.name = name;
this.buf = buf;
this.startPos = startPos;
this.maxSize = maxSize;
curPos = 0;
}
public UtilSizeBuf(String name, UtilByteBuffer buf) {
this(name, buf, 0, buf.getDataLength());
}
public void init(int startPos, int maxSize) {
this.startPos = startPos;
this.maxSize = maxSize;
curPos = 0;
}
public void reset() {
this.curPos = 0;
}
public int tell() {
return curPos;
}
public int getAbsoluteCurrentPos() {
return curPos + startPos;
}
public void skip(int count) {
if (curPos + count > maxSize) {
curPos = maxSize;
throw new SizebufOverflowException(name);
}
curPos += count;
}
public int readUInt8() {
if (curPos + 1 > maxSize) {
curPos = maxSize;
throw new SizebufOverflowException(name);
}
int pos = curPos;
curPos++;
return buf.readUInt8(pos + this.startPos);
}
public boolean readBool() {
if (curPos + 1 > maxSize) {
curPos = maxSize;
throw new SizebufOverflowException(name);
}
int pos = curPos;
curPos++;
return buf.readBool(pos + this.startPos);
}
public int readUInt16() {
if (curPos + 2 > maxSize) {
curPos = maxSize;
throw new SizebufOverflowException(name);
}
int pos = curPos;
curPos += 2;
return buf.readUInt16(pos + this.startPos);
}
public long readUInt32() {
if (curPos + 4 > maxSize) {
curPos = maxSize;
throw new SizebufOverflowException(name);
}
int pos = curPos;
curPos += 4;
return buf.readUInt32(pos + this.startPos);
}
public int readInt32() {
if (curPos + 4 > maxSize) {
curPos = maxSize;
throw new SizebufOverflowException(name);
}
int pos = curPos;
curPos += 4;
return buf.readInt32(pos + this.startPos);
}
public long readInt64() {
if (curPos + 8 > maxSize) {
curPos = maxSize;
throw new SizebufOverflowException(name);
}
int pos = curPos;
curPos += 8;
return buf.readInt64(pos + this.startPos);
}
public double readDouble() {
if (curPos + 8 > maxSize) {
curPos = maxSize;
throw new SizebufOverflowException(name);
}
int pos = curPos;
curPos += 8;
return buf.readDouble(pos + this.startPos);
}
public float readFloat() {
if (curPos + 4 > maxSize) {
curPos = maxSize;
throw new SizebufOverflowException(name);
}
int pos = curPos;
curPos += 4;
return buf.readFloat(pos + this.startPos);
}
public String readString() {
String s = buf.readString(curPos + this.startPos);
if (s == null) {
curPos = maxSize;
throw new SizebufOverflowException(name);
}
curPos += s.getBytes(Globals.UTF8).length + 1;
return s;
}
public int getStartPos() {
return startPos;
}
public int getMaxSize() {
return maxSize;
}
public byte[] getBuffer() {
return buf.getData();
}
}
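A small sketch of how UtilByteBuffer and UtilSizeBuf fit together (illustrative only; the byte values are arbitrary). All multi-byte reads are little-endian, and reading past maxSize throws SizebufOverflowException:
<pre>
import org.rehlds.flightrec.api.util.UtilByteBuffer;
import org.rehlds.flightrec.api.util.UtilSizeBuf;

public class SizeBufExample {
    public static void main(String[] args) {
        // a little-endian uint16 (0x0201) followed by a little-endian uint32 (0x06050403)
        byte[] raw = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06};
        UtilSizeBuf sb = new UtilSizeBuf("example", new UtilByteBuffer(raw));
        int u16 = sb.readUInt16();  // 0x0201
        long u32 = sb.readUInt32(); // 0x06050403
        System.out.printf("%04X %08X%n", u16, u32);
        // any further read would throw SizebufOverflowException ("example overflowed")
    }
}
</pre>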


@ -1,127 +0,0 @@
package org.rehlds.flightrec.util;
import org.junit.Test;
import org.rehlds.flightrec.api.util.UtilByteBuffer;
import static org.junit.Assert.*;
public class UtilByteBufferTest {
@Test
public void testReadUInt8() throws Exception {
byte data[] = { 0x10, 0x00, 0x7F, (byte)0x80, (byte)0xFF };
UtilByteBuffer bb = new UtilByteBuffer(data);
assertEquals(0x10, bb.readUInt8(0));
assertEquals(0x00, bb.readUInt8(1));
assertEquals(0x7F, bb.readUInt8(2));
assertEquals(0x80, bb.readUInt8(3));
assertEquals(0xFF, bb.readUInt8(4));
}
@Test
public void testReadUInt16() throws Exception {
byte data[] = { 0x10, 0x00, 0x7F, (byte)0x80, (byte)0xFF, 0x00 };
UtilByteBuffer bb = new UtilByteBuffer(data);
assertEquals(0x10, bb.readUInt16(0));
assertEquals(0x7F00, bb.readUInt16(1));
assertEquals(0x807F, bb.readUInt16(2));
assertEquals(0xFF80, bb.readUInt16(3));
assertEquals(0x00FF, bb.readUInt16(4));
}
@Test
public void testReadUInt32() throws Exception {
byte data[] = { 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x7F, 0x00, 0x00, 0x00, (byte)0xFF, 0x00, 0x00, 0x00 };
UtilByteBuffer bb = new UtilByteBuffer(data);
assertEquals(0x10000000, bb.readUInt32(0));
assertEquals(0x100000, bb.readUInt32(1));
assertEquals(0x1000, bb.readUInt32(2));
assertEquals(0x10, bb.readUInt32(3));
assertEquals(0x0, bb.readUInt32(4));
assertEquals(0x7F000000, bb.readUInt32(5));
assertEquals(0x7F0000, bb.readUInt32(6));
assertEquals(0x7F00, bb.readUInt32(7));
assertEquals(0x7F, bb.readUInt32(8));
assertEquals(0xFF000000L, bb.readUInt32(9));
assertEquals(0xFF0000, bb.readUInt32(10));
assertEquals(0xFF00, bb.readUInt32(11));
assertEquals(0xFF, bb.readUInt32(12));
}
@Test
public void testReadInt32() throws Exception {
byte data[] = { 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x7F, 0x00, 0x00, 0x00, (byte)0xFF, 0x00, 0x00, 0x00 };
UtilByteBuffer bb = new UtilByteBuffer(data);
assertEquals(0x10000000, bb.readInt32(0));
assertEquals(0x100000, bb.readInt32(1));
assertEquals(0x1000, bb.readInt32(2));
assertEquals(0x10, bb.readInt32(3));
assertEquals(0x0, bb.readInt32(4));
assertEquals(0x7F000000, bb.readInt32(5));
assertEquals(0x7F0000, bb.readInt32(6));
assertEquals(0x7F00, bb.readInt32(7));
assertEquals(0x7F, bb.readInt32(8));
assertEquals((int)0xFF000000, bb.readInt32(9));
assertEquals(0xFF0000, bb.readInt32(10));
assertEquals(0xFF00, bb.readInt32(11));
assertEquals(0xFF, bb.readInt32(12));
}
@Test
public void testReadInt64() throws Exception {
byte data[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, (byte)0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };
UtilByteBuffer bb = new UtilByteBuffer(data);
assertEquals(0x1000000000000000L, bb.readInt64(0));
assertEquals(0x10000000000000L, bb.readInt64(1));
assertEquals(0x100000000000L, bb.readInt64(2));
assertEquals(0x1000000000L, bb.readInt64(3));
assertEquals(0x10000000L, bb.readInt64(4));
assertEquals(0x100000L, bb.readInt64(5));
assertEquals(0x1000L, bb.readInt64(6));
assertEquals(0x10L, bb.readInt64(7));
assertEquals(0x00L, bb.readInt64(8));
assertEquals(0x7F00000000000000L, bb.readInt64(9));
assertEquals(0x7F000000000000L, bb.readInt64(10));
assertEquals(0x7F0000000000L, bb.readInt64(11));
assertEquals(0x7F00000000L, bb.readInt64(12));
assertEquals(0x7F000000L, bb.readInt64(13));
assertEquals(0x7F0000L, bb.readInt64(14));
assertEquals(0x7F00L, bb.readInt64(15));
assertEquals(0x7FL, bb.readInt64(16));
assertEquals(0xFF00000000000000L, bb.readInt64(17));
assertEquals(0xFF000000000000L, bb.readInt64(18));
assertEquals(0xFF0000000000L, bb.readInt64(19));
assertEquals(0xFF00000000L, bb.readInt64(20));
assertEquals(0xFF000000L, bb.readInt64(21));
assertEquals(0xFF0000L, bb.readInt64(22));
assertEquals(0xFF00L, bb.readInt64(23));
assertEquals(0xFFL, bb.readInt64(24));
}
@Test
public void testReadString() throws Exception {
byte data[] = { 0x00, 0x41, 0x00, 0x41, 0x42, 0x00, 0x50, 0x75, 0x62, 0x6C, 0x69, 0x63, 0x69, 0x74, (byte)0xC3, (byte)0xA9, 0x00, 0x41, 0x42 };
UtilByteBuffer bb = new UtilByteBuffer(data);
assertEquals("", bb.readString(0));
assertEquals("A", bb.readString(1));
assertEquals("", bb.readString(2));
assertEquals("AB", bb.readString(3));
assertEquals("Publicité", bb.readString(6));
assertNull(bb.readString(17));
assertEquals("Public", bb.readString(6, 6, false));
assertNull(bb.readString(6, 6, true));
assertEquals("AB", bb.readString(17, 2, false));
assertNull(bb.readString(17, 2, true));
}
}


@ -1,4 +0,0 @@
@echo off
call "%VS140COMNTOOLS%vcvarsqueryregistry.bat"
echo %UniversalCRTSdkDir%
echo %UCRTVersion%


@ -1,3 +0,0 @@
majorVersion=3
minorVersion=8
maintenanceVersion=0

Binary file not shown.


@ -1,6 +0,0 @@
#Sat Jun 06 16:31:05 BRT 2015
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-2.4-all.zip

gradlew vendored

@ -1,164 +0,0 @@
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn ( ) {
echo "$*"
}
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# For Cygwin, ensure paths are in UNIX format before anything is touched.
if $cygwin ; then
[ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
fi
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >&-
APP_HOME="`pwd -P`"
cd "$SAVED" >&-
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"

90
gradlew.bat vendored
View File

@@ -1,90 +0,0 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@@ -1,186 +0,0 @@
import org.doomedsociety.gradlecpp.GradleCppUtils
import org.apache.commons.io.FilenameUtils
void _copyFileToDir(String from, String to) {
if (!project.file(from).exists()) {
println 'WARNING: Could not find: ' + from;
return;
}
if (!project.file(to).exists()) {
project.file(to).mkdirs();
}
def dst = new File(project.file(to), FilenameUtils.getName(from))
GradleCppUtils.copyFile(project.file(from), dst, false)
}
void _copyFile(String from, String to) {
if (!project.file(from).exists()) {
println 'WARNING: Could not find: ' + from;
return;
}
GradleCppUtils.copyFile(project.file(from), project.file(to), false)
}
task publishPrepareFiles {
dependsOn ':flightrec/decoder:uberjar'
doLast {
def pubRootDir = project.file('publish/publishRoot')
if (pubRootDir.exists()) {
if (!pubRootDir.deleteDir()) {
throw new RuntimeException("Failed to delete ${pubRootDir}")
}
}
pubRootDir.mkdirs()
project.file('publish/publishRoot/bin/win32/valve/dlls').mkdirs()
project.file('publish/publishRoot/bin/linux32/valve/dlls').mkdirs()
// bugfixed binaries
_copyFile('publish/releaseRehldsFixes/swds.dll', 'publish/publishRoot/bin/win32/swds.dll')
_copyFile('publish/releaseRehldsFixes/engine_i486.so', 'publish/publishRoot/bin/linux32/engine_i486.so')
// dedicated binaries
_copyFile('publish/hlds.exe', 'publish/publishRoot/bin/win32/hlds.exe')
_copyFile('publish/hlds_linux', 'publish/publishRoot/bin/linux32/hlds_linux')
// HLTV binaries
_copyFile('publish/hltv.exe', 'publish/publishRoot/bin/win32/hltv.exe')
_copyFile('publish/hltv', 'publish/publishRoot/bin/linux32/hltv')
_copyFile('publish/core.dll', 'publish/publishRoot/bin/win32/core.dll')
_copyFile('publish/core.so', 'publish/publishRoot/bin/linux32/core.so')
_copyFile('publish/proxy.dll', 'publish/publishRoot/bin/win32/proxy.dll')
_copyFile('publish/proxy.so', 'publish/publishRoot/bin/linux32/proxy.so')
_copyFile('publish/demoplayer.dll', 'publish/publishRoot/bin/win32/demoplayer.dll')
_copyFile('publish/demoplayer.so', 'publish/publishRoot/bin/linux32/demoplayer.so')
_copyFile('publish/director.dll', 'publish/publishRoot/bin/win32/valve/dlls/director.dll')
_copyFile('publish/director.so', 'publish/publishRoot/bin/linux32/valve/dlls/director.so')
// FileSystem binaries
_copyFile('publish/filesystem_stdio.dll', 'publish/publishRoot/bin/win32/filesystem_stdio.dll')
_copyFile('publish/filesystem_stdio.so', 'publish/publishRoot/bin/linux32/filesystem_stdio.so')
// hlsdk
project.file('publish/publishRoot/hlsdk').mkdirs()
copy {
from 'rehlds/common'
into 'publish/publishRoot/hlsdk/common'
}
copy {
from 'rehlds/dlls'
into 'publish/publishRoot/hlsdk/dlls'
}
copy {
from 'rehlds/pm_shared'
into 'publish/publishRoot/hlsdk/pm_shared'
}
copy {
from 'rehlds/public'
into 'publish/publishRoot/hlsdk/public'
exclude '**/rehlds/*'
}
copy {
from 'rehlds/public/rehlds'
into 'publish/publishRoot/hlsdk/engine'
}
// flightrecorder
def flightRecJarTask = project(':flightrec/decoder').tasks.getByName('uberjar')
println flightRecJarTask
println flightRecJarTask.class.name
File flightRecJarFile = flightRecJarTask.archivePath
project.file('publish/publishRoot/flighrec').mkdirs()
GradleCppUtils.copyFile(flightRecJarFile, project.file('publish/publishRoot/flighrec/decoder.jar'), false)
copy {
from new File(project(':flightrec/decoder').projectDir, 'pub')
into 'publish/publishRoot/flighrec'
}
}
}
task publishPackage(type: Zip, dependsOn: 'publishPrepareFiles') {
baseName = "rehlds-dist-${project.version}"
destinationDir file('publish')
from 'publish/publishRoot'
}
publishing {
publications {
maven(MavenPublication) {
version project.version
artifact publishPackage
pom.withXml {
asNode().children().last() + {
resolveStrategy = DELEGATE_FIRST
name project.name
description project.description
properties {
commitDate project.ext.rehldsVersionInfo.commitDate
commitSHA project.ext.rehldsVersionInfo.commitSHA
}
//url github
//scm {
// url "${github}.git"
// connection "scm:git:${github}.git"
//}
/*
licenses {
license {
name 'The Apache Software License, Version 2.0'
url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
distribution 'repo'
}
}
developers {
developer {
id 'dreamstalker'
name 'dreamstalker'
}
}
*/
}
}
}
}
}
Properties repoCreds = new Properties()
project.ext.repoCreds = repoCreds
if (file('repo_creds.properties').exists()) {
println 'Loading maven repo credentials'
file('repo_creds.properties').withReader('UTF-8', { Reader r ->
repoCreds.load(r)
})
}
publishing {
repositories {
maven {
if (project.version.contains('dev')) {
url "http://nexus.rehlds.org/nexus/content/repositories/rehlds-dev/"
} else {
url "http://nexus.rehlds.org/nexus/content/repositories/rehlds-releases/"
}
credentials {
username repoCreds.getProperty('username')
password repoCreds.getProperty('password')
}
}
}
}
task doPublish {
dependsOn 'publishPackage'
if (repoCreds.getProperty('username') && repoCreds.getProperty('password')) {
dependsOn 'publish'
dependsOn ':flightrec/decoder_api:publish'
}
}

View File

@@ -1,173 +0,0 @@
import org.doomedsociety.gradlecpp.cfg.ToolchainConfigUtils
import org.doomedsociety.gradlecpp.msvc.MsvcToolchainConfig
import org.doomedsociety.gradlecpp.toolchain.icc.Icc
import org.doomedsociety.gradlecpp.toolchain.icc.IccCompilerPlugin
import org.doomedsociety.gradlecpp.gcc.GccToolchainConfig
import org.doomedsociety.gradlecpp.GradleCppUtils
import org.gradle.nativeplatform.NativeExecutableSpec
import org.gradle.nativeplatform.NativeExecutableBinarySpec
import org.gradle.nativeplatform.toolchain.VisualCpp
apply plugin: 'cpp'
apply plugin: 'windows-resources'
apply plugin: IccCompilerPlugin
apply plugin: GccCompilerPlugin
List<Task> getRcCompileTasks(NativeBinarySpec binary)
{
def linkTask = GradleCppUtils.getLinkTask(binary)
def res = linkTask.taskDependencies.getDependencies(linkTask).findAll { Task t -> t instanceof WindowsResourceCompile }
return res as List
}
void setupToolchain(NativeBinarySpec b) {
boolean useGcc = project.hasProperty("useGcc")
def cfg = rootProject.createToolchainConfig(b);
cfg.projectInclude(project, '/..', '/../..', '/src', '/../../common', '/../../engine', '/../../public', '/../../public/rehlds');
cfg.singleDefines 'USE_BREAKPAD_HANDLER', 'HLTV', 'LAUNCHER_FIXES', '_CONSOLE'
if (cfg instanceof MsvcToolchainConfig) {
cfg.compilerOptions.pchConfig = new MsvcToolchainConfig.PrecompiledHeadersConfig(
enabled: true,
pchHeader: 'precompiled.h',
pchSourceSet: 'hltv_pch'
);
cfg.singleDefines('_CRT_SECURE_NO_WARNINGS');
cfg.linkerOptions.args('/SUBSYSTEM:WINDOWS,5.01');
cfg.compilerOptions.args '/Ob2', '/Oi', '/GF', '/GR-'
cfg.extraLibs "user32.lib"
}
else if (cfg instanceof GccToolchainConfig) {
if (!useGcc) {
cfg.compilerOptions.pchConfig = new GccToolchainConfig.PrecompilerHeaderOptions(
enabled: true,
pchSourceSet: 'hltv_pch'
);
}
cfg.compilerOptions.languageStandard = 'c++11'
cfg.defines([
'_strdup': 'strdup',
'_stricmp': 'strcasecmp',
'_strnicmp': 'strncasecmp',
'_vsnprintf': 'vsnprintf',
'_snprintf': 'snprintf',
]);
if (useGcc) {
// Produce code optimized for the most common IA32/AMD64/EM64T processors.
// As new processors are deployed in the marketplace, the behavior of this option will change.
cfg.compilerOptions.args '-mtune=generic', '-Wno-write-strings', '-msse3', '-flto'
} else {
cfg.compilerOptions.args '-Qoption,cpp,--treat_func_as_string_literal_cpp'
}
cfg.linkerOptions.args '-no-pie'
cfg.compilerOptions.args '-fno-rtti', '-fno-exceptions'
cfg.extraLibs 'dl'
}
ToolchainConfigUtils.apply(project, cfg, b);
}
model {
buildTypes {
release
}
platforms {
x86 {
architecture "x86"
}
}
toolChains {
visualCpp(VisualCpp) {
}
if (project.hasProperty("useGcc")) {
gcc(Gcc)
} else {
icc(Icc)
}
}
components {
hltv(NativeExecutableSpec) {
targetPlatform 'x86'
baseName 'hltv'
sources {
hltv_main(CppSourceSet) {
source {
srcDir "src"
include "**/*.cpp"
exclude "precompiled.cpp"
}
}
hltv_common(CppSourceSet) {
source {
srcDirs "../../common", "../common"
// common
include "BaseSystemModule.cpp"
include "ObjectList.cpp"
include "TokenLine.cpp"
include "textconsole.cpp"
include "minidump.cpp"
if (GradleCppUtils.windows) {
include "TextConsoleWin32.cpp"
}
else {
include "TextConsoleUnix.cpp"
}
// HLTV common
include "random.cpp"
include "common.cpp"
}
}
hltv_engine(CppSourceSet) {
source {
srcDir "../../engine"
include "mem.cpp"
}
}
hltv_pch(CppSourceSet) {
source {
srcDir "src"
include "precompiled.cpp"
}
}
rc {
source {
srcDir "msvc"
include "hltv.rc"
}
exportedHeaders {
srcDirs "msvc"
}
}
}
binaries.all {
NativeExecutableBinarySpec b -> project.setupToolchain(b)
}
}
}
}
task buildFixes {
dependsOn binaries.withType(NativeExecutableBinarySpec).matching { NativeExecutableBinarySpec blib ->
blib.buildable && blib.buildType.name == 'release'
}
}
task buildRelease {
dependsOn binaries.withType(NativeExecutableBinarySpec).matching { NativeExecutableBinarySpec blib ->
blib.buildable && blib.buildType.name == 'release'
}
}

View File

@@ -77,14 +77,6 @@ IF EXIST "%srcdir%\version.h" (
IF %%j==VERSION_MAINTENANCE set version_maintenance=%%k
)
)
) ELSE (
FOR /F "usebackq tokens=1,2,3,* delims==" %%i in ("%repodir%..\gradle.properties") do (
IF NOT [%%j] == [] (
IF %%i==majorVersion set version_major=%%j
IF %%i==minorVersion set version_minor=%%j
IF %%i==maintenanceVersion set version_maintenance=%%j
)
)
)
::

View File

@@ -1,186 +0,0 @@
import org.doomedsociety.gradlecpp.cfg.ToolchainConfigUtils
import org.doomedsociety.gradlecpp.msvc.MsvcToolchainConfig
import org.doomedsociety.gradlecpp.toolchain.icc.Icc
import org.doomedsociety.gradlecpp.toolchain.icc.IccCompilerPlugin
import org.doomedsociety.gradlecpp.gcc.GccToolchainConfig
import org.doomedsociety.gradlecpp.GradleCppUtils
import org.gradle.nativeplatform.NativeBinarySpec
import org.gradle.nativeplatform.NativeLibrarySpec
import org.gradle.nativeplatform.toolchain.VisualCpp
apply plugin: 'cpp'
apply plugin: IccCompilerPlugin
apply plugin: GccCompilerPlugin
project.ext.dep_bzip2 = project(':dep/bzip2')
void setupToolchain(NativeBinarySpec b) {
boolean useGcc = project.hasProperty("useGcc")
def cfg = rootProject.createToolchainConfig(b);
cfg.projectInclude(project, '/..', '/../..', '/src', '/../../common', '/../../engine', '/../../public', '/../../public/rehlds', '/../../pm_shared');
cfg.projectInclude(dep_bzip2, '/include')
cfg.singleDefines 'USE_BREAKPAD_HANDLER', 'HLTV', 'HLTV_FIXES', 'CORE_MODULE'
if (cfg instanceof MsvcToolchainConfig) {
cfg.compilerOptions.pchConfig = new MsvcToolchainConfig.PrecompiledHeadersConfig(
enabled: true,
pchHeader: 'precompiled.h',
pchSourceSet: 'core_pch'
);
cfg.singleDefines('_CRT_SECURE_NO_WARNINGS')
cfg.compilerOptions.args '/Ob2', '/Oi', '/GF'
cfg.extraLibs "ws2_32.lib", "psapi.lib"
}
else if (cfg instanceof GccToolchainConfig) {
if (!useGcc) {
cfg.compilerOptions.pchConfig = new GccToolchainConfig.PrecompilerHeaderOptions(
enabled: true,
pchSourceSet: 'core_pch'
);
}
cfg.compilerOptions.languageStandard = 'c++11'
cfg.defines([
'_strdup': 'strdup',
'_stricmp': 'strcasecmp',
'_strnicmp': 'strncasecmp',
'_vsnprintf': 'vsnprintf',
'_snprintf': 'snprintf',
]);
if (useGcc) {
// Produce code optimized for the most common IA32/AMD64/EM64T processors.
// As new processors are deployed in the marketplace, the behavior of this option will change.
cfg.compilerOptions.args '-mtune=generic', '-Wno-write-strings', '-msse3', '-flto'
} else {
cfg.compilerOptions.args '-Qoption,cpp,--treat_func_as_string_literal_cpp'
}
cfg.compilerOptions.args '-fno-exceptions'
}
ToolchainConfigUtils.apply(project, cfg, b);
}
model {
buildTypes {
release
}
platforms {
x86 {
architecture "x86"
}
}
toolChains {
visualCpp(VisualCpp) {
}
if (project.hasProperty("useGcc")) {
gcc(Gcc)
} else {
icc(Icc)
}
}
components {
core(NativeLibrarySpec) {
targetPlatform 'x86'
baseName 'core'
sources {
core_main(CppSourceSet) {
source {
srcDir "src"
include "**/*.cpp"
exclude "precompiled.cpp"
}
lib project: ':dep/bzip2', library: 'bzip2', linkage: 'static'
}
core_common(CppSourceSet) {
source {
srcDirs "../../common", "../common"
// common
include "BaseSystemModule.cpp"
include "ObjectDictionary.cpp"
include "ObjectList.cpp"
include "TokenLine.cpp"
// HLTV common
include "BitBuffer.cpp"
include "byteorder.cpp"
include "common.cpp"
include "DemoFile.cpp"
include "DirectorCmd.cpp"
include "InfoString.cpp"
include "mathlib.cpp"
include "md5.cpp"
include "munge.cpp"
include "NetAddress.cpp"
include "NetChannel.cpp"
include "random.cpp"
}
}
core_engine(CppSourceSet) {
source {
srcDir "../../engine"
include "mem.cpp"
}
}
core_pch(CppSourceSet) {
source {
srcDir "src"
include "precompiled.cpp"
lib project: ':dep/bzip2', library: 'bzip2', linkage: 'static'
}
}
}
binaries.all {
NativeBinarySpec b -> project.setupToolchain(b)
}
}
}
}
task buildFinalize << {
if (GradleCppUtils.windows) {
return;
}
binaries.withType(SharedLibraryBinarySpec) {
def sharedBinary = it.getSharedLibraryFile();
if (sharedBinary.exists()) {
sharedBinary.renameTo(new File(sharedBinary.getParent() + "/" + sharedBinary.getName().replaceFirst("^lib", "")));
}
}
}
task buildFixes {
dependsOn binaries.withType(SharedLibraryBinarySpec).matching { SharedLibraryBinarySpec blib ->
blib.buildable && blib.buildType.name == 'release'
}
}
task buildRelease {
dependsOn binaries.withType(SharedLibraryBinarySpec).matching { SharedLibraryBinarySpec blib ->
blib.buildable && blib.buildType.name == 'release'
}
}
build.finalizedBy(buildFinalize);
buildFixes.finalizedBy(buildFinalize);
buildRelease.finalizedBy(buildFinalize);
// prevent static lib building
binaries.withType(StaticLibraryBinarySpec) { binary ->
buildable = false
}

View File

@@ -1,171 +0,0 @@
import org.doomedsociety.gradlecpp.cfg.ToolchainConfigUtils
import org.doomedsociety.gradlecpp.msvc.MsvcToolchainConfig
import org.doomedsociety.gradlecpp.toolchain.icc.Icc
import org.doomedsociety.gradlecpp.toolchain.icc.IccCompilerPlugin
import org.doomedsociety.gradlecpp.gcc.GccToolchainConfig
import org.doomedsociety.gradlecpp.GradleCppUtils
import org.gradle.nativeplatform.NativeBinarySpec
import org.gradle.nativeplatform.NativeLibrarySpec
import org.gradle.nativeplatform.toolchain.VisualCpp
apply plugin: 'cpp'
apply plugin: IccCompilerPlugin
apply plugin: GccCompilerPlugin
void setupToolchain(NativeBinarySpec b) {
boolean useGcc = project.hasProperty("useGcc")
def cfg = rootProject.createToolchainConfig(b);
cfg.projectInclude(project, '/..', '/../..', '/src', '/../common', '/../../common', '/../../engine', '/../../public', '/../../public/rehlds', '/../../pm_shared');
cfg.singleDefines 'USE_BREAKPAD_HANDLER', 'HLTV', 'HLTV_FIXES', 'DEMOPLAYER_MODULE'
if (cfg instanceof MsvcToolchainConfig) {
cfg.compilerOptions.pchConfig = new MsvcToolchainConfig.PrecompiledHeadersConfig(
enabled: true,
pchHeader: 'precompiled.h',
pchSourceSet: 'demoplayer_pch'
);
cfg.singleDefines('_CRT_SECURE_NO_WARNINGS')
cfg.compilerOptions.args '/Ob2', '/Oi', '/GF'
cfg.extraLibs "psapi.lib"
}
else if (cfg instanceof GccToolchainConfig) {
if (!useGcc) {
cfg.compilerOptions.pchConfig = new GccToolchainConfig.PrecompilerHeaderOptions(
enabled: true,
pchSourceSet: 'demoplayer_pch'
);
}
cfg.compilerOptions.languageStandard = 'c++11'
cfg.defines([
'_strdup': 'strdup',
'_stricmp': 'strcasecmp',
'_strnicmp': 'strncasecmp',
'_vsnprintf': 'vsnprintf',
'_snprintf': 'snprintf',
]);
if (useGcc) {
// Produce code optimized for the most common IA32/AMD64/EM64T processors.
// As new processors are deployed in the marketplace, the behavior of this option will change.
cfg.compilerOptions.args '-mtune=generic', '-Wno-write-strings', '-msse3', '-flto'
} else {
cfg.compilerOptions.args '-Qoption,cpp,--treat_func_as_string_literal_cpp'
}
cfg.compilerOptions.args '-fno-exceptions'
}
ToolchainConfigUtils.apply(project, cfg, b);
}
model {
buildTypes {
release
}
platforms {
x86 {
architecture "x86"
}
}
toolChains {
visualCpp(VisualCpp) {
}
if (project.hasProperty("useGcc")) {
gcc(Gcc)
} else {
icc(Icc)
}
}
components {
demoplayer(NativeLibrarySpec) {
targetPlatform 'x86'
baseName 'demoplayer'
sources {
demoplayer_main(CppSourceSet) {
source {
srcDir "src"
include "**/*.cpp"
exclude "precompiled.cpp"
}
}
demoplayer_common(CppSourceSet) {
source {
srcDirs "/../../common", "/../common"
// common
include "BaseSystemModule.cpp"
include "ObjectDictionary.cpp"
include "ObjectList.cpp"
include "TokenLine.cpp"
// HLTV common
include "BitBuffer.cpp"
include "byteorder.cpp"
include "common.cpp"
include "DirectorCmd.cpp"
include "mathlib.cpp"
}
}
demoplayer_engine(CppSourceSet) {
source {
srcDir "../../engine"
include "mem.cpp"
}
}
demoplayer_pch(CppSourceSet) {
source {
srcDir "src"
include "precompiled.cpp"
}
}
}
binaries.all {
NativeBinarySpec b -> project.setupToolchain(b)
}
}
}
}
task buildFinalize << {
if (GradleCppUtils.windows) {
return;
}
binaries.withType(SharedLibraryBinarySpec) {
def sharedBinary = it.getSharedLibraryFile();
if (sharedBinary.exists()) {
sharedBinary.renameTo(new File(sharedBinary.getParent() + "/" + sharedBinary.getName().replaceFirst("^lib", "")));
}
}
}
task buildFixes {
dependsOn binaries.withType(SharedLibraryBinarySpec).matching { SharedLibraryBinarySpec blib ->
blib.buildable && blib.buildType.name == 'release'
}
}
task buildRelease {
dependsOn binaries.withType(SharedLibraryBinarySpec).matching { SharedLibraryBinarySpec blib ->
blib.buildable && blib.buildType.name == 'release'
}
}
build.finalizedBy(buildFinalize);
buildFixes.finalizedBy(buildFinalize);
buildRelease.finalizedBy(buildFinalize);
// prevent static lib building
binaries.withType(StaticLibraryBinarySpec) { binary ->
buildable = false
}

View File

@@ -1,171 +0,0 @@
import org.doomedsociety.gradlecpp.cfg.ToolchainConfigUtils
import org.doomedsociety.gradlecpp.msvc.MsvcToolchainConfig
import org.doomedsociety.gradlecpp.toolchain.icc.Icc
import org.doomedsociety.gradlecpp.toolchain.icc.IccCompilerPlugin
import org.doomedsociety.gradlecpp.gcc.GccToolchainConfig
import org.doomedsociety.gradlecpp.GradleCppUtils
import org.gradle.nativeplatform.NativeBinarySpec
import org.gradle.nativeplatform.NativeLibrarySpec
import org.gradle.nativeplatform.toolchain.VisualCpp
apply plugin: 'cpp'
apply plugin: IccCompilerPlugin
apply plugin: GccCompilerPlugin
void setupToolchain(NativeBinarySpec b) {
boolean useGcc = project.hasProperty("useGcc")
def cfg = rootProject.createToolchainConfig(b);
cfg.projectInclude(project, '/..', '/../..', '/src', '/../../common', '/../../engine', '/../../public', '/../../public/rehlds', '/../../pm_shared');
cfg.singleDefines 'USE_BREAKPAD_HANDLER', 'HLTV', 'HLTV_FIXES', 'DIRECTOR_MODULE'
if (cfg instanceof MsvcToolchainConfig) {
cfg.compilerOptions.pchConfig = new MsvcToolchainConfig.PrecompiledHeadersConfig(
enabled: true,
pchHeader: 'precompiled.h',
pchSourceSet: 'director_pch'
);
cfg.singleDefines('_CRT_SECURE_NO_WARNINGS')
cfg.compilerOptions.args '/Ob2', '/Oi', '/GF'
}
else if (cfg instanceof GccToolchainConfig) {
if (!useGcc) {
cfg.compilerOptions.pchConfig = new GccToolchainConfig.PrecompilerHeaderOptions(
enabled: true,
pchSourceSet: 'director_pch'
);
}
cfg.compilerOptions.languageStandard = 'c++0x'
cfg.defines([
'_strdup': 'strdup',
'_stricmp': 'strcasecmp',
'_strnicmp': 'strncasecmp',
'_vsnprintf': 'vsnprintf',
'_snprintf': 'snprintf',
]);
if (useGcc) {
// Produce code optimized for the most common IA32/AMD64/EM64T processors.
// As new processors are deployed in the marketplace, the behavior of this option will change.
cfg.compilerOptions.args '-mtune=generic', '-Wno-write-strings', '-msse3', '-flto'
} else {
cfg.compilerOptions.args '-Qoption,cpp,--treat_func_as_string_literal_cpp'
}
cfg.compilerOptions.args '-fno-exceptions'
}
ToolchainConfigUtils.apply(project, cfg, b);
}
model {
buildTypes {
release
}
platforms {
x86 {
architecture "x86"
}
}
toolChains {
visualCpp(VisualCpp) {
}
if (project.hasProperty("useGcc")) {
gcc(Gcc)
} else {
icc(Icc)
}
}
components {
director(NativeLibrarySpec) {
targetPlatform 'x86'
baseName 'director'
sources {
director_main(CppSourceSet) {
source {
srcDir "src"
include "**/*.cpp"
exclude "precompiled.cpp"
}
}
director_common(CppSourceSet) {
source {
srcDirs "../../common", "../common"
// common
include "BaseSystemModule.cpp"
include "ObjectDictionary.cpp"
include "ObjectList.cpp"
include "TokenLine.cpp"
// HLTV common
include "BitBuffer.cpp"
include "byteorder.cpp"
include "common.cpp"
include "DirectorCmd.cpp"
include "mathlib.cpp"
include "random.cpp"
}
}
director_engine(CppSourceSet) {
source {
srcDir "../../engine"
include "mem.cpp"
}
}
director_pch(CppSourceSet) {
source {
srcDir "src"
include "precompiled.cpp"
}
}
}
binaries.all {
NativeBinarySpec b -> project.setupToolchain(b)
}
}
}
}
task buildFinalize << {
if (GradleCppUtils.windows) {
return;
}
binaries.withType(SharedLibraryBinarySpec) {
def sharedBinary = it.getSharedLibraryFile();
if (sharedBinary.exists()) {
sharedBinary.renameTo(new File(sharedBinary.getParent() + "/" + sharedBinary.getName().replaceFirst("^lib", "")));
}
}
}
task buildFixes {
dependsOn binaries.withType(SharedLibraryBinarySpec).matching { SharedLibraryBinarySpec blib ->
blib.buildable && blib.buildType.name == 'release'
}
}
task buildRelease {
dependsOn binaries.withType(SharedLibraryBinarySpec).matching { SharedLibraryBinarySpec blib ->
blib.buildable && blib.buildType.name == 'release'
}
}
build.finalizedBy(buildFinalize);
buildFixes.finalizedBy(buildFinalize);
buildRelease.finalizedBy(buildFinalize);
// prevent static lib building
binaries.withType(StaticLibraryBinarySpec) { binary ->
buildable = false
}

View File

@@ -1,198 +0,0 @@
import org.doomedsociety.gradlecpp.cfg.ToolchainConfigUtils
import org.doomedsociety.gradlecpp.msvc.MsvcToolchainConfig
import org.doomedsociety.gradlecpp.toolchain.icc.Icc
import org.doomedsociety.gradlecpp.toolchain.icc.IccCompilerPlugin
import org.doomedsociety.gradlecpp.gcc.GccToolchainConfig
import org.doomedsociety.gradlecpp.GradleCppUtils
import org.gradle.nativeplatform.NativeBinarySpec
import org.gradle.nativeplatform.NativeLibrarySpec
import org.gradle.nativeplatform.toolchain.VisualCpp
apply plugin: 'cpp'
apply plugin: IccCompilerPlugin
apply plugin: GccCompilerPlugin
project.ext.dep_bzip2 = project(':dep/bzip2')
void setupToolchain(NativeBinarySpec b) {
boolean useGcc = project.hasProperty("useGcc")
def cfg = rootProject.createToolchainConfig(b);
cfg.projectInclude(project, '/..', '/../..', '/src', '/../Director/src', '/../../common', '/../../engine', '/../../public', '/../../public/rehlds', '/../../pm_shared');
cfg.projectInclude(dep_bzip2, '/include')
cfg.singleDefines 'USE_BREAKPAD_HANDLER', 'HLTV', 'HLTV_FIXES', 'PROXY_MODULE'
if (cfg instanceof MsvcToolchainConfig) {
cfg.compilerOptions.pchConfig = new MsvcToolchainConfig.PrecompiledHeadersConfig(
enabled: true,
pchHeader: 'precompiled.h',
pchSourceSet: 'proxy_pch'
);
cfg.singleDefines('_CRT_SECURE_NO_WARNINGS')
cfg.compilerOptions.args '/Ob2', '/Oi', '/GF'
cfg.projectLibpath(project, '/../../lib')
cfg.extraLibs "steam_api.lib", "psapi.lib", "ws2_32.lib"
}
else if (cfg instanceof GccToolchainConfig) {
if (!useGcc) {
cfg.compilerOptions.pchConfig = new GccToolchainConfig.PrecompilerHeaderOptions(
enabled: true,
pchSourceSet: 'proxy_pch'
);
}
cfg.compilerOptions.languageStandard = 'c++11'
cfg.defines([
'_strdup': 'strdup',
'_stricmp': 'strcasecmp',
'_strnicmp': 'strncasecmp',
'_vsnprintf': 'vsnprintf',
'_snprintf': 'snprintf',
]);
if (useGcc) {
// Produce code optimized for the most common IA32/AMD64/EM64T processors.
// As new processors are deployed in the marketplace, the behavior of this option will change.
cfg.compilerOptions.args '-mtune=generic', '-Wno-write-strings', '-msse3', '-flto'
} else {
cfg.compilerOptions.args '-Qoption,cpp,--treat_func_as_string_literal_cpp'
}
cfg.compilerOptions.args '-fno-exceptions'
cfg.projectLibpath(project, '/../../lib/linux32')
cfg.extraLibs "steam_api"
}
ToolchainConfigUtils.apply(project, cfg, b);
}
model {
buildTypes {
release
}
platforms {
x86 {
architecture "x86"
}
}
toolChains {
visualCpp(VisualCpp) {
}
if (project.hasProperty("useGcc")) {
gcc(Gcc)
} else {
icc(Icc)
}
}
components {
proxy(NativeLibrarySpec) {
targetPlatform 'x86'
baseName 'proxy'
sources {
proxy_main(CppSourceSet) {
source {
srcDir "src"
include "**/*.cpp"
exclude "precompiled.cpp"
}
lib project: ':dep/bzip2', library: 'bzip2', linkage: 'static'
}
proxy_common(CppSourceSet) {
source {
srcDirs "../../common", "../common"
// common
include "BaseSystemModule.cpp"
include "ObjectDictionary.cpp"
include "ObjectList.cpp"
include "TokenLine.cpp"
// HLTV common
include "BaseClient.cpp"
include "BitBuffer.cpp"
include "byteorder.cpp"
include "common.cpp"
include "DemoFile.cpp"
include "DirectorCmd.cpp"
include "InfoString.cpp"
include "mathlib.cpp"
include "md5.cpp"
include "munge.cpp"
include "NetAddress.cpp"
include "NetChannel.cpp"
include "random.cpp"
}
}
proxy_engine(CppSourceSet) {
source {
srcDir "../../engine"
include "mem.cpp"
}
}
proxy_director(CppSourceSet) {
source {
srcDir "../Director/src"
include "Director.cpp"
}
}
proxy_pch(CppSourceSet) {
source {
srcDir "src"
include "precompiled.cpp"
lib project: ':dep/bzip2', library: 'bzip2', linkage: 'static'
}
}
}
binaries.all {
NativeBinarySpec b -> project.setupToolchain(b)
}
}
}
}
task buildFinalize << {
if (GradleCppUtils.windows) {
return;
}
binaries.withType(SharedLibraryBinarySpec) {
def sharedBinary = it.getSharedLibraryFile();
if (sharedBinary.exists()) {
sharedBinary.renameTo(new File(sharedBinary.getParent() + "/" + sharedBinary.getName().replaceFirst("^lib", "")));
}
}
}
task buildFixes {
dependsOn binaries.withType(SharedLibraryBinarySpec).matching { SharedLibraryBinarySpec blib ->
blib.buildable && blib.buildType.name == 'release'
}
}
task buildRelease {
dependsOn binaries.withType(SharedLibraryBinarySpec).matching { SharedLibraryBinarySpec blib ->
blib.buildable && blib.buildType.name == 'release'
}
}
build.finalizedBy(buildFinalize);
buildFixes.finalizedBy(buildFinalize);
buildRelease.finalizedBy(buildFinalize);
// prevent static lib building
binaries.withType(StaticLibraryBinarySpec) { binary ->
buildable = false
}

View File

@@ -1,15 +1,5 @@
## Half-Life TV for Goldsrc based games <img align="right" src="https://cloud.githubusercontent.com/assets/5860435/25316484/320cce9a-2892-11e7-8d12-3290baf4458c.png" alt="HLTV Launcher"/>
### Building
On Windows:
<pre>gradlew --max-workers=1 clean rehlds/HLTV:build</pre>
On Linux (ICC):
<pre>./gradlew --max-workers=1 clean rehlds/HLTV:build</pre>
On Linux (GCC):
<pre>./gradlew --max-workers=1 -PuseGcc clean rehlds/HLTV:build</pre>
Compiled binaries will be placed in each project rehlds/HLTV/**/binaries/ directory
### Overview

View File

@@ -1,13 +0,0 @@
evaluationDependsOn(':rehlds/HLTV/Console');
evaluationDependsOn(':rehlds/HLTV/Core');
evaluationDependsOn(':rehlds/HLTV/DemoPlayer');
evaluationDependsOn(':rehlds/HLTV/Director');
evaluationDependsOn(':rehlds/HLTV/Proxy');
task build {
dependsOn project(':rehlds/HLTV/Console').tasks.build,
project(':rehlds/HLTV/Core').tasks.build,
project(':rehlds/HLTV/DemoPlayer').tasks.build,
project(':rehlds/HLTV/Director').tasks.build,
project(':rehlds/HLTV/Proxy').tasks.build
}

View File

@@ -1,425 +0,0 @@
import gradlecpp.RehldsPlayTestPlugin
import gradlecpp.RehldsPlayTestTask
import gradlecpp.VelocityUtils
import org.doomedsociety.gradlecpp.GradleCppUtils
import org.doomedsociety.gradlecpp.LazyNativeDepSet
import org.doomedsociety.gradlecpp.cfg.ToolchainConfig
import org.doomedsociety.gradlecpp.cfg.ToolchainConfigUtils
import org.doomedsociety.gradlecpp.gcc.GccToolchainConfig
import org.doomedsociety.gradlecpp.msvc.EnhancedInstructionsSet
import org.doomedsociety.gradlecpp.msvc.FloatingPointModel
import org.doomedsociety.gradlecpp.msvc.MsvcToolchainConfig
import org.doomedsociety.gradlecpp.toolchain.icc.Icc
import org.doomedsociety.gradlecpp.toolchain.icc.IccCompilerPlugin
import org.gradle.language.cpp.CppSourceSet
import org.gradle.nativeplatform.NativeBinarySpec
import org.gradle.nativeplatform.NativeExecutableSpec
import org.gradle.nativeplatform.NativeLibrarySpec
import org.gradle.nativeplatform.SharedLibraryBinarySpec
import rehlds.testdemo.RehldsDemoRunner
import versioning.RehldsVersionInfo
import org.apache.commons.io.FilenameUtils
apply plugin: 'cpp'
apply plugin: IccCompilerPlugin
apply plugin: GccCompilerPlugin
apply plugin: RehldsPlayTestPlugin
apply plugin: gradlecpp.CppUnitTestPlugin
repositories {
maven {
url 'http://nexus.rehlds.org/nexus/content/repositories/rehlds-releases/'
}
}
configurations {
rehlds_tests
}
dependencies {
rehlds_tests 'rehlds.testdemos:hl-phys-single1:1.1'
rehlds_tests 'rehlds.testdemos:crossfire-1-multiplayer-1:1.1'
rehlds_tests 'rehlds.testdemos:cstrike-muliplayer-1:1.1'
rehlds_tests 'rehlds.testdemos:shooting-hl-1:1.1'
}
project.ext.dep_bzip2 = project(':dep/bzip2')
project.ext.dep_cppunitlite = project(':dep/cppunitlite')
void createIntergrationTestTask(NativeBinarySpec b) {
boolean rehldsFixes = b.flavor.name.contains('rehldsFixes')
if (!(b instanceof SharedLibraryBinarySpec)) return
if (!GradleCppUtils.windows) return
if (rehldsFixes) return
def libLinkTask = GradleCppUtils.getLinkTask(b)
String unitTestTask = b.hasProperty('cppUnitTestTask') ? b.cppUnitTestTask : null
def depFiles = []
depFiles.addAll(libLinkTask.outputs.files.files)
def demoItgTestTask = project.tasks.create(b.namingScheme.getTaskName('demoItgTest'), RehldsPlayTestTask)
demoItgTestTask.with {
rehldsImageRoot = new File(project.projectDir, '_rehldsTestImg')
rehldsTestLogs = new File(this.project.buildDir, "_rehldsTestLogs/${b.name}")
testDemos = project.configurations.rehlds_tests
testFor = b
//inputs/outputs for up-to-date check
inputs.files depFiles
inputs.files testDemos.files
outputs.dir rehldsTestLogs
//dependencies on library and test executable
dependsOn libLinkTask
if (unitTestTask) {
dependsOn unitTestTask
}
postExtractAction {
def binaryOutFile = GradleCppUtils.getBinaryOutputFile(b)
GradleCppUtils.copyFile(binaryOutFile, new File(rehldsImageRoot, binaryOutFile.name), true)
}
}
b.buildTask.dependsOn demoItgTestTask
}
void setupUnitTests(NativeBinarySpec bin) {
boolean unitTestExecutable = bin.component.name.endsWith('_tests')
if (!unitTestExecutable) return
GradleCppUtils.getLinkTask(bin).doLast {
String srcPath = '' + projectDir + (GradleCppUtils.windows ? '/lib/steam_api.dll' : '/lib/linux32/libsteam_api.so')
String dstPath = bin.executableFile.parent + (GradleCppUtils.windows ? '/steam_api.dll' : '/libsteam_api.so')
GradleCppUtils.copyFile(srcPath, dstPath, true)
}
}
void postEvaluate(NativeBinarySpec b) {
// attach generateAppVersion task to all 'compile source' tasks
GradleCppUtils.getCompileTasks(b).each { Task t ->
t.dependsOn project.generateAppVersion
}
setupUnitTests(b)
createIntergrationTestTask(b)
}
void setupToolchain(NativeBinarySpec b) {
boolean useGcc = project.hasProperty("useGcc")
boolean unitTestExecutable = b.component.name.endsWith('_tests')
boolean swdsLib = b.name.toLowerCase().contains('swds')
boolean rehldsFixes = b.flavor.name.contains('rehldsFixes')
boolean release = b.buildType.name.toLowerCase() == 'release';
ToolchainConfig cfg = rootProject.createToolchainConfig(b)
cfg.projectInclude(project, '', '/public/rehlds', '/engine', '/common', '/pm_shared', '/rehlds', '/testsuite', '/hookers', '/public')
cfg.projectInclude(dep_bzip2, '/include')
if (unitTestExecutable) {
cfg.projectInclude(dep_cppunitlite, '/include')
b.lib LazyNativeDepSet.create(dep_cppunitlite, 'cppunitlite', b.buildType.name, true)
}
b.lib LazyNativeDepSet.create(dep_bzip2, 'bzip2', b.buildType.name, true)
cfg.singleDefines 'USE_BREAKPAD_HANDLER', 'DEDICATED', 'SWDS', 'REHLDS_SELF', 'REHLDS_OPT_PEDANTIC', 'REHLDS_API'
if (cfg instanceof MsvcToolchainConfig) {
cfg.compilerOptions.pchConfig = new MsvcToolchainConfig.PrecompiledHeadersConfig(
enabled: true,
pchHeader: 'precompiled.h',
pchSourceSet: 'rehlds_pch'
)
cfg.singleDefines('_CRT_SECURE_NO_WARNINGS')
if (!rehldsFixes) {
cfg.compilerOptions.floatingPointModel = FloatingPointModel.PRECISE
cfg.compilerOptions.enhancedInstructionsSet = EnhancedInstructionsSet.DISABLED
} else {
cfg.compilerOptions.args '/Oi', '/GF', '/GR-', '/GS-'
}
if (swdsLib) {
cfg.linkerOptions.randomizedBaseAddress = false
cfg.linkerOptions.baseAddress = '0x4970000'
}
cfg.projectLibpath(project, '/lib')
cfg.extraLibs 'steam_api.lib', 'psapi.lib', 'ws2_32.lib', 'kernel32.lib', 'user32.lib', 'advapi32.lib', 'libacof32.lib'
} else if (cfg instanceof GccToolchainConfig) {
if (!useGcc) {
cfg.compilerOptions.pchConfig = new GccToolchainConfig.PrecompilerHeaderOptions(
enabled: true,
pchSourceSet: 'rehlds_pch'
)
}
cfg.compilerOptions.languageStandard = 'c++11'
cfg.defines([
'_stricmp': 'strcasecmp',
'_strnicmp': 'strncasecmp',
'_strdup': 'strdup',
'_unlink': 'unlink',
'_vsnprintf': 'vsnprintf',
'_vsnwprintf' : 'vswprintf',
]);
if (useGcc) {
// Produce code optimized for the most common IA32/AMD64/EM64T processors.
// As new processors are deployed in the marketplace, the behavior of this option will change.
cfg.compilerOptions.args '-mtune=generic', '-Wno-write-strings', '-msse3'
} else {
cfg.compilerOptions.args '-Qoption,cpp,--treat_func_as_string_literal_cpp'
}
cfg.compilerOptions.args '-ffunction-sections', '-fdata-sections' // Remove unused code and data
cfg.compilerOptions.args '-fno-rtti', '-fno-exceptions'
cfg.linkerOptions.args '-Wl,--version-script=../version_script.lds', '-Wl,--gc-sections'
cfg.projectLibpath(project, '/lib/linux32')
cfg.extraLibs 'rt', 'dl', 'm', 'steam_api', 'aelf32'
}
if (unitTestExecutable) {
cfg.singleDefines 'REHLDS_UNIT_TESTS', 'REHLDS_SSE', 'REHLDS_JIT'
}
if (rehldsFixes) {
cfg.singleDefines 'REHLDS_FIXES', 'REHLDS_SSE', 'REHLDS_JIT', 'REHLDS_CHECKS', 'HAVE_OPT_STRTOOLS'
}
if (!release) {
cfg.singleDefines 'REHLDS_FLIGHT_REC'
}
ToolchainConfigUtils.apply(project, cfg, b)
GradleCppUtils.onTasksCreated(project, 'postEvaluate', {
postEvaluate(b)
})
}
class RehldsSrc {
static void rehlds_src(def h) {
h.rehlds_engine(CppSourceSet) {
source {
srcDirs "engine", "rehlds", "version"
if (GradleCppUtils.windows) srcDirs "testsuite"
include "**/*.cpp"
exclude "precompiled.cpp"
}
}
h.rehlds_public(CppSourceSet) {
source {
srcDirs "public"
include "registry.cpp", "steamid.cpp", "utlbuffer.cpp", "tier0/dbg.cpp"
}
}
h.rehlds_memory(CppSourceSet) {
source {
srcDirs "hookers"
include "memory.cpp"
}
}
h.rehlds_common(CppSourceSet) {
source {
srcDirs "common"
include "BaseSystemModule.cpp", "ObjectList.cpp", "TokenLine.cpp"
}
}
}
static void rehlds_pch(def h) {
h.rehlds_pch(CppSourceSet) {
source {
srcDirs "rehlds"
include "precompiled.cpp"
}
}
}
static void rehlds_swds_main_src(def h) {
h.rehlds_swds_main_src(CppSourceSet) {
source {
srcDir "hookers"
include "engine/main_swds.cpp"
}
}
}
static void rehlds_tests_src(def h) {
h.rehlds_tests_src(CppSourceSet) {
source {
srcDir "unittests"
include "**/*.cpp"
}
}
}
}
model {
buildTypes {
debug
release
}
platforms {
x86 {
architecture "x86"
}
}
toolChains {
visualCpp(VisualCpp)
if (project.hasProperty("useGcc")) {
gcc(Gcc)
} else {
icc(Icc)
}
}
flavors {
rehldsNofixes
rehldsFixes
}
components {
rehlds_swds_engine(NativeLibrarySpec) {
targetPlatform 'x86'
baseName GradleCppUtils.windows ? 'swds' : 'engine_i486'
sources {
RehldsSrc.rehlds_pch(it)
RehldsSrc.rehlds_src(it)
RehldsSrc.rehlds_swds_main_src(it)
}
binaries.all { NativeBinarySpec b -> project.setupToolchain(b) }
}
rehlds_swds_engine_tests(NativeExecutableSpec) {
targetPlatform 'x86'
sources {
RehldsSrc.rehlds_pch(it)
RehldsSrc.rehlds_src(it)
RehldsSrc.rehlds_tests_src(it)
}
binaries.all { NativeBinarySpec b -> project.setupToolchain(b) }
}
}
}
task buildFinalize << {
if (GradleCppUtils.windows) {
return;
}
binaries.withType(SharedLibraryBinarySpec) {
def sharedBinary = it.getSharedLibraryFile();
if (sharedBinary.exists()) {
sharedBinary.renameTo(new File(sharedBinary.getParent() + "/" + sharedBinary.getName().replaceFirst("^lib", "")));
}
}
}
task buildRelease {
dependsOn binaries.withType(SharedLibraryBinarySpec).matching { SharedLibraryBinarySpec blib ->
blib.buildable && blib.buildType.name == 'release'
}
}
task buildFixes {
dependsOn binaries.withType(SharedLibraryBinarySpec).matching {
SharedLibraryBinarySpec blib -> blib.buildable && blib.buildType.name == 'release' && blib.flavor.name == 'rehldsFixes' && blib.component.name == 'rehlds_swds_engine'
}
}
task buildEngine {
dependsOn binaries.withType(SharedLibraryBinarySpec).matching {
SharedLibraryBinarySpec blib -> blib.buildable && blib.buildType.name == 'release' && blib.flavor.name == 'rehldsFixes' && blib.component.name == 'rehlds_swds_engine'
}
}
buildFixes.finalizedBy(buildFinalize);
buildEngine.finalizedBy(buildFinalize);
buildRelease.finalizedBy(buildFinalize);
gradle.taskGraph.whenReady { graph ->
if (!graph.hasTask(buildFixes) && !graph.hasTask(buildEngine)) {
return;
}
// skip all tasks with the matched substrings in the name like "test"
def tasks = graph.getAllTasks();
tasks.findAll { it.name.toLowerCase().contains("test") }.each { task ->
task.enabled = false;
}
}
task prepareDevEnvTests {
def rehldsTests = new File(project.projectDir, '_dev/testDemos')
inputs.files configurations.rehlds_tests.files
outputs.dir rehldsTests
doLast {
rehldsTests.mkdirs()
configurations.rehlds_tests.files.each { File f ->
def t = zipTree(f)
copy {
into new File(rehldsTests, FilenameUtils.getBaseName(f.absolutePath))
from t
}
}
}
}
task prepareDevEnvEngine << {
['_dev/rehlds', '_dev/rehlds_swds'].each { engineDir ->
def rehldsImage = new File(project.projectDir, engineDir)
rehldsImage.mkdirs()
def demoRunner = new RehldsDemoRunner(project.configurations.rehlds_playtest_image.getFiles(), rehldsImage, null)
demoRunner.prepareEngine()
}
}
task prepareDevEnv {
dependsOn prepareDevEnvEngine, prepareDevEnvTests
}
tasks.clean.doLast {
project.file('version/appversion.h').delete()
}
task generateAppVersion {
RehldsVersionInfo verInfo = (RehldsVersionInfo) rootProject.rehldsVersionInfo
def tplFile = project.file('version/appversion.vm')
def renderedFile = project.file('version/appversion.h')
// check to up-to-date
inputs.file tplFile
inputs.file project.file('gradle.properties')
outputs.file renderedFile
// this will ensure that this task is redone when the versions change
inputs.property('version', rootProject.version)
inputs.property('commitDate', verInfo.asCommitDate())
println "##teamcity[buildNumber '" + verInfo.asMavenVersion(false) + "']";
doLast {
def templateCtx = [
verInfo : verInfo
]
def content = VelocityUtils.renderTemplate(tplFile, templateCtx)
renderedFile.delete()
renderedFile.write(content, 'utf-8')
println 'The current ReHLDS maven version is ' + rootProject.version + ', url: (' + verInfo.commitURL + '' + verInfo.commitSHA + ')';
}
}

Some files were not shown because too many files have changed in this diff.