This commit is contained in:
Emily Hudson 2020-06-26 18:11:51 +01:00
commit 7fab9d0c00
49 changed files with 33891 additions and 0 deletions

31
.circleci/.circleci.yml Normal file
View File

@ -0,0 +1,31 @@
# CircleCI pipeline: builds the static library liboxide.a with clang
# inside an Ubuntu container and stores it as a build artifact.
# NOTE: indentation was reconstructed — the scraped copy of this file had
# all YAML nesting stripped, which is invalid YAML.
version: 2.1
jobs:
  build:
    working_directory: ~/oxide
    docker:
      - image: ubuntu:rolling
    steps:
      - run:
          name: Install dependencies
          command: |
            apt-get update -qq
            apt-get install -y build-essential clang-6.0 cmake git wget ssh patchelf libgl1-mesa-dev libglu1-mesa-dev
      - checkout
      - run: git submodule sync
      - run: git submodule update --init --recursive
      - run:
          name: Generate Make files
          command: |
            chmod +x premake5
            chmod +x create_projects.sh
            ./create_projects.sh
      - run:
          name: Build
          command: |
            cd premake
            # premake emits plain "clang"/"clang++" tool names; alias them
            # to the versioned binaries the apt package installs
            ln -s /usr/bin/clang-6.0 /usr/bin/clang
            ln -s /usr/bin/clang++-6.0 /usr/bin/clang++
            make -j2
      - store_artifacts:
          path: ~/oxide/bin/Debug/liboxide.a
          destination: liboxide.a

16
.clang-format Normal file
View File

@ -0,0 +1,16 @@
---
# clang-format configuration for this project.
# ColumnLimit 0 disables automatic line wrapping entirely.
ColumnLimit: '0'
AllowShortCaseLabelsOnASingleLine: 'true'
AllowShortIfStatementsOnASingleLine: 'true'
AllowShortLoopsOnASingleLine: 'true'
# Access modifiers (public:/private:) outdented one level relative to members.
AccessModifierOffset: '-4'
IndentCaseLabels: 'false'
IndentWidth: '4'
Standard: Cpp11
# Alignment options: line up operands, '=' signs, escaped newlines and
# consecutive declarations into columns.
AlignOperands: 'true'
AlignConsecutiveAssignments: 'true'
AlignEscapedNewlinesLeft: 'true'
AlignConsecutiveDeclarations: 'true'
AlignAfterOpenBracket: 'true'
BinPackParameters: 'true'
...

618
.gitignore vendored Normal file
View File

@ -0,0 +1,618 @@
# Created by https://www.gitignore.io/api/vim,c++,linux,emacs,windows,eclipse,jetbrains,qtcreator,visualstudio,premake-gmake,visualstudiocode
### C++ ###
# Prerequisites
*.d
# Compiled Object files
*.slo
*.lo
*.o
*.obj
# Precompiled Headers
*.gch
*.pch
# Compiled Dynamic libraries
*.so
*.dylib
*.dll
# Fortran module files
*.mod
*.smod
# Compiled Static libraries
*.lai
*.la
*.a
# *.lib
# Executables
*.out
*.app
### Eclipse ###
.metadata
bin/
tmp/
*.tmp
*.bak
*.swp
*~.nib
local.properties
.settings/
.loadpath
.recommenders
# External tool builders
.externalToolBuilders/
# Locally stored "Eclipse launch configurations"
*.launch
# PyDev specific (Python IDE for Eclipse)
*.pydevproject
# CDT-specific (C/C++ Development Tooling)
.cproject
# Java annotation processor (APT)
.factorypath
# PDT-specific (PHP Development Tools)
.buildpath
# sbteclipse plugin
.target
# Tern plugin
.tern-project
# TeXlipse plugin
.texlipse
# STS (Spring Tool Suite)
.springBeans
# Code Recommenders
.recommenders/
# Scala IDE specific (Scala & Java development for Eclipse)
.cache-main
.scala_dependencies
.worksheet
### Eclipse Patch ###
# Eclipse Core
.project
# JDT-specific (Eclipse Java Development Tools)
.classpath
### Emacs ###
# -*- mode: gitignore; -*-
*~
\#*\#
/.emacs.desktop
/.emacs.desktop.lock
*.elc
auto-save-list
tramp
.\#*
# Org-mode
.org-id-locations
*_archive
# flymake-mode
*_flymake.*
# eshell files
/eshell/history
/eshell/lastdir
# elpa packages
/elpa/
# reftex files
*.rel
# AUCTeX auto folder
/auto/
# cask packages
.cask/
dist/
# Flycheck
flycheck_*.el
# server auth directory
/server/
# projectiles files
.projectile
projectile-bookmarks.eld
# directory configuration
.dir-locals.el
# saveplace
places
# url cache
url/cache/
# cedet
ede-projects.el
# smex
smex-items
# company-statistics
company-statistics-cache.el
# anaconda-mode
anaconda-mode/
### JetBrains ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff:
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/dictionaries
# Sensitive or high-churn files:
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.xml
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
# Gradle:
.idea/**/gradle.xml
.idea/**/libraries
# CMake
cmake-build-debug/
# Mongo Explorer plugin:
.idea/**/mongoSettings.xml
## File-based project format:
*.iws
## Plugin-specific files:
# IntelliJ
/out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# Ruby plugin and RubyMine
/.rakeTasks
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
### JetBrains Patch ###
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
# *.iml
# modules.xml
# .idea/misc.xml
# *.ipr
# Sonarlint plugin
.idea/sonarlint
### Linux ###
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
### premake-gmake ###
Makefile
*.make
obj/
### QtCreator ###
# gitignore for Qt Creator like IDE for pure C/C++ project without Qt
#
# Reference: http://doc.qt.io/qtcreator/creator-project-generic.html
# Qt Creator autogenerated files
# A listing of all the files included in the project
*.files
# Include directories
*.includes
# Project configuration settings like predefined Macros
*.config
# Qt Creator settings
*.creator
# User project settings
*.creator.user*
# Qt Creator backups
*.autosave
### Vim ###
# swap
.sw[a-p]
.*.sw[a-p]
# session
Session.vim
# temporary
.netrwhist
# auto-generated tag files
tags
### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
.history
### Windows ###
# Windows thumbnail cache files
Thumbs.db
ehthumbs.db
ehthumbs_vista.db
# Folder config file
Desktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msm
*.msp
# Windows shortcuts
*.lnk
### VisualStudio ###
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
# Visual Studio 2015 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
**/Properties/launchSettings.json
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# TODO: Uncomment the next line to ignore your web deploy settings.
# By default, sensitive information, such as encrypted password
# should be stored in the .pubxml.user file.
#*.pubxml
*.pubxml.user
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/packages/*
# except build/, which is used as an MSBuild target.
!**/packages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/packages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!*.[Cc]ache/
# Others
ClientBin/
~$*
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Typescript v1 declaration files
typings/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# JetBrains Rider
.idea/
*.sln.iml
# CodeRush
.cr/
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
### VisualStudio Patch ###
# By default, sensitive information, such as encrypted password
# should be stored in the .pubxml.user file.
# End of https://www.gitignore.io/api/vim,c++,linux,emacs,windows,eclipse,jetbrains,qtcreator,visualstudio,premake-gmake,visualstudiocode
# CUSTOM #
bin/
*.vcxproj
*.sln
premake/
gdb.txt
core
compile_commands.json
.cquery_cached_index

3
.gitmodules vendored Normal file
View File

@ -0,0 +1,3 @@
[submodule "premake_modules"]
path = premake_modules
url = https://github.com/josh33901/premake-export-compile-commands.git

6
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,6 @@
{
"files.associations": {
"*.txt": "plaintext",
"xiosbase": "c"
}
}

21
LICENSE Normal file
View File

@ -0,0 +1,21 @@
Copyright (c) 2020 Emily Hudson
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

2
README.md Normal file
View File

@ -0,0 +1,2 @@
# Oxide
Crossplatform overlay window

8
create_projects.bat Normal file
View File

@ -0,0 +1,8 @@
@echo off
rem Generate build files on Windows: a Visual Studio 2017 solution plus a
rem compile_commands.json database for clang-based tooling.
rem Assumes premake5.exe is on PATH or in the current directory.
echo creating projects...
premake5 vs2017
premake5 export-compile-commands
echo finished.

6
create_projects.sh Normal file
View File

@ -0,0 +1,6 @@
#!/bin/sh
# Generate build files on Linux: GNU makefiles plus a compile_commands.json
# database for clang-based tooling (cquery etc.).
# Fix: the original had no shebang line even though CI chmods this file and
# executes it directly (./create_projects.sh).
echo creating projects...
./premake5 "gmake"
./premake5 "export-compile-commands"
echo finished.

25
import.lua Normal file
View File

@ -0,0 +1,25 @@
-- Project definitions pulled into the workspace by premake5.lua via
-- `require "import"`: a console test app and the oxide2 static library.
project "oxide_test"
kind "ConsoleApp"
language "C"
targetdir "bin/%{cfg.platform}_%{cfg.buildcfg}/%{prj.name}"
links {"oxide2"}
includedirs { "include" }
files { "test/**.h", "test/**.c", "include/**.h" }
project "oxide2"
kind "StaticLib"
language "C"
targetdir "lib/%{cfg.platform}_%{cfg.buildcfg}/%{prj.name}"
libdirs{"src/lib/RelWithDebInfo"}
filter{"system:windows"}
links{"OpenGL32", "dwmapi"}
filter{}
-- NOTE(review): these links come after the filter{} reset, so glfw3 and
-- glew32s.lib are linked on every system, including Linux — confirm this
-- is intentional (glew32s.lib looks Windows-specific).
links{"glfw3", "glew32s.lib"}
includedirs { "src", "include" }
files { "src/**.h", "src/**.c", "include/**.h" }

24
include/oxide.h Normal file
View File

@ -0,0 +1,24 @@
#pragma once

#include <stdbool.h>

/*
 * Public C API of Oxide, a crossplatform overlay window library.
 *
 * Fix: parameterless prototypes now spell `(void)`. In C, `()` declares a
 * function with UNSPECIFIED parameters, so calls with wrong arguments were
 * not diagnosed; `(void)` makes the prototypes strict.
 */

/* Result codes returned by oxide_init(). */
typedef enum Oxide_InitResult {
    oxide_init_success,       /* overlay created and attached */
    oxide_init_window_failed, /* overlay window creation failed */
    oxide_init_gl_failed,     /* OpenGL context setup failed */
    oxide_init_target_failed, /* target window could not be found/attached */
} Oxide_InitResult;

/* Initialise the overlay against the window named target_name. */
extern Oxide_InitResult oxide_init(const char *target_name);

/* Tear down the overlay and release its resources. */
extern void oxide_shutdown(void);

/* Re-target the overlay to another window.
 * Returns bool — presumably true on success; TODO confirm against the
 * implementation (not visible in this header). */
extern bool oxide_set_target(const char *target_name);

/* Bracket one frame of overlay rendering. */
extern void oxide_begin_frame(void);
extern void oxide_end_frame(void);

/* Overlay client-area dimensions in pixels. */
typedef struct Oxide_WindowSize {
    int width;
    int height;
} Oxide_WindowSize;

/* Resize the overlay to match the target and return the new size —
 * TODO confirm exact semantics from the implementation. */
extern Oxide_WindowSize oxide_resize(void);

Binary file not shown.

12
make.bat Normal file
View File

@ -0,0 +1,12 @@
rem Debug build driver for Windows: regenerate projects, then build the
rem premake-generated solution with MSBuild (hard-coded VS2017 Enterprise path).
pushd %~dp0
call create_projects
echo building...
"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Enterprise\\MSBuild\\15.0\\Bin\\msbuild" /property:GenerateFullPaths=true /t:build premake\oxide2.sln /p:Configuration=Debug /verbosity:minimal
echo done.
popd
exit

12
make.sh Normal file
View File

@ -0,0 +1,12 @@
#!/bin/sh
# Debug build driver for Linux: regenerate projects, then run the
# premake-generated makefiles.
# set cwd to script directory
cd "${0%/*}"
./create_projects.sh
echo building...
cd premake/
# NOTE(review): a make failure is not propagated — the script still echoes
# "done." and exits 0; confirm whether CI relies on the exit status.
make -j4
echo done.
exit

12
make_release.bat Normal file
View File

@ -0,0 +1,12 @@
pushd %~dp0
call create_projects
echo Building RELEASE...
"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Enterprise\\MSBuild\\15.0\\Bin\\msbuild" /property:GenerateFullPaths=true /t:build premake\oxide2.sln /p:Configuration=Release /verbosity:minimal
echo done.
popd
exit

BIN
premake5 Normal file

Binary file not shown.

BIN
premake5.exe Normal file

Binary file not shown.

69
premake5.lua Normal file
View File

@ -0,0 +1,69 @@
-- Workspace definition for oxide2: configures platforms, toolsets and
-- configurations, then pulls in the project definitions from import.lua.
require("premake_modules/export-compile-commands")
require("premake_modules/cmake")
workspace "oxide2"
configurations { "Debug", "Release" }
platforms { "x64", "x32" }
location "premake"
filter {"system:windows"}
characterset "MBCS"
filter {}
-- Set up platforms
filter {"platforms:x32"}
architecture "x32"
filter {"platforms:x64"}
architecture "x64"
filter {}
-- Setup c++ spec per platform
-- Linux uses a buildoption to allow for more
-- up to date standards (2a)
filter {"system:windows"}
toolset "msc-v141"
filter {"system:linux"}
toolset "clang" -- prefer clang over gcc
filter {}
-- Setup configurations
filter "configurations:Debug"
defines { "DEBUG", "_DEBUG" }
optimize "Off"
filter {"system:windows"}
symbols "Full"
filter {"system:linux"}
symbols "On"
buildoptions "-g3" -- need this for gdb
filter {}
runtime "Debug"
filter {"configurations:Release"}
defines { "NDEBUG" }
optimize "Full"
symbols "Off"
flags {"LinkTimeOptimization"}
filter {}
require "import"
-- For moving the compile commands into the root directory of the project
-- so that autocomplete tools can see them (cquery...)
-- This is messy but was the only way to get it to work consistently
-- across multiple platforms (circleci, windows 10, vsts...)
filter "system:linux"
prebuildcommands {
"{MKDIR} %{wks.location}/compile_commands/",
"{TOUCH} %{wks.location}/compile_commands/%{cfg.shortname}.json",
"{COPY} %{wks.location}/compile_commands/%{cfg.shortname}.json ../compile_commands.json"
}
filter "system:windows"
-- NOTE(review): each command below opens a quote (cmd.exe /c ") that is
-- never closed, and the last COPY destination carries a trailing '*'
-- (an xcopy trick to suppress the file/directory prompt). This looks
-- deliberate per the "messy" note above — left untouched; verify before
-- cleaning up.
prebuildcommands {
"cmd.exe /c \"" .. "{MKDIR} %{wks.location}/compile_commands/",
"cmd.exe /c \"" .. "{TOUCH} %{wks.location}/compile_commands/%{cfg.shortname}.json",
"cmd.exe /c \"" .. "{COPY} %{wks.location}/compile_commands/%{cfg.shortname}.json ../compile_commands.json*"
}

3
premake_modules/.gitmodules vendored Normal file
View File

@ -0,0 +1,3 @@
[submodule "ghp"]
path = ghp
url = https://github.com/mversluys/premake-ghp.git

44
premake_modules/README.md Normal file
View File

@ -0,0 +1,44 @@
## Generate compile_commands.json for premake projects
This module implements [JSON Compilation Database Format
Specification](http://clang.llvm.org/docs/JSONCompilationDatabase.html) for
premake projects.
Install this module somewhere premake can find it, for example:
```
git clone https://github.com/tarruda/premake-export-compile-commands export-compile-commands
```
Then put this at the top of your system script (e.g. ~/.premake/premake-system.lua):
```lua
require "export-compile-commands"
```
Note that while possible, it is not recommended to put the `require` line in
project-specific premake configuration because the "export-compile-commands"
module will need to be installed everywhere your project is built.
After the above steps, the "export-compile-commands" action will be available
for your projects:
```
premake5 export-compile-commands
```
The `export-compile-commands` action will generate one json file per
config/platform combination in each workspace, all under the `compile_commands`
subdirectory. For example, say you have defined `debug` and `release`
configurations with `x32` and `x64` platforms, the output will be something
like:
```
Generated WORKSPACE_BUILD_DIR/compile_commands/debug_x32.json...
Generated WORKSPACE_BUILD_DIR/compile_commands/debug_x64.json...
Generated WORKSPACE_BUILD_DIR/compile_commands/release_x32.json...
Generated WORKSPACE_BUILD_DIR/compile_commands/release_x64.json...
```
where each file contains the compilation commands for the corresponding
config/platform combo.

View File

@ -0,0 +1,4 @@
-- Module manifest: the files premake should ship/load for this module.
return {
'_preload.lua',
'export-compile-commands.lua',
}

View File

@ -0,0 +1,34 @@
-- Preload stub for the "cmake" premake action: registers the action and
-- defers loading the full module until it is actually needed.
local p = premake
newaction {
trigger = "cmake",
shortname = "CMake",
description = "Generate classical CMakeLists.txt",
valid_kinds = { "ConsoleApp", "WindowedApp", "StaticLib", "SharedLib" },
valid_languages = { "C", "C++" },
valid_tools = {
cc = { "clang", "gcc" },
},
-- Callbacks dispatch into p.modules.cmake, which premake loads lazily
-- via the predicate returned below.
onWorkspace = function(wks)
p.modules.cmake.generateWorkspace(wks)
end,
onProject = function(prj)
p.modules.cmake.generateProject(prj)
end,
onCleanWorkspace = function(wks)
p.modules.cmake.cleanWorkspace(wks)
end,
onCleanProject = function(prj)
p.modules.cmake.cleanProject(prj)
end,
onCleanTarget = function(prj)
p.modules.cmake.cleanTarget(prj)
end,
}
-- Predicate premake calls to decide whether to load the full module:
-- load for either of the two actions this file family implements.
return function(cfg)
return _ACTION == 'export-compile-commands' or _ACTION == 'cmake'
end

90
premake_modules/cmake.lua Normal file
View File

@ -0,0 +1,90 @@
-- cmake.lua: entry point of the cmake generator module. Sets up the
-- p.modules.cmake namespace and local aliases used throughout the file.
local p = premake
p.modules.cmake = {}
p.modules.cmake._VERSION = p._VERSION
local cmake = p.modules.cmake
local project = p.project
-- Debug helper: recursively print a table, one "key: value" per line,
-- indenting one space per nesting level.
-- Fix: the original called table.tostring(), which does not exist in the
-- Lua standard library and raised a runtime error on any nested table;
-- it also leaked `formatting` as a global. Recurse with tprint instead.
function cmake.tprint (tbl, indent)
	if not indent then indent = 0 end
	for k, v in pairs(tbl) do
		local formatting = string.rep(" ", indent) .. k .. ": "
		if type(v) == "table" then
			print(formatting)
			cmake.tprint(v, indent + 1)
		else
			print(formatting .. tostring(v))
		end
	end
end
-- Escaper hook handed to the premake writer; cmake output needs no
-- escaping, so values pass through untouched.
function cmake.esc(value)
	return value
end

-- Name of a configuration: "<BuildCfg>" or "<BuildCfg>_<Platform>" when
-- the config carries a platform.
function cmake.cfgname(cfg)
	if cfg.platform then
		return cfg.buildcfg .. '_' .. cfg.platform
	end
	return cfg.buildcfg
end

-- cmake target name: "<ProjectName>_<ConfigurationName>" (the platform
-- suffix comes in via cfgname when present).
function cmake.targetname(cfg)
	return string.format('%s_%s', cfg.project.name, cmake.cfgname(cfg))
end
-- Generate Workspace
-- Writes the top-level CMakeLists.txt for the workspace via the premake
-- writer (cmake.workspace.generate does the actual emission).
function cmake.generateWorkspace(wks)
p.eol("\r\n")
p.indent(" ")
p.escaper(cmake.esc)
wks.filename = "CMakeLists"
p.generate(wks, ".txt", cmake.workspace.generate)
end
-- Generate Project
-- Writes <project>.cmake for one project.
-- NOTE(review): generation is gated on project.iscpp(prj); confirm this is
-- also true for language "C" projects (import.lua declares language "C") —
-- if not, C projects silently get no .cmake file.
function cmake.generateProject(prj)
p.eol("\r\n")
p.indent(" ")
p.escaper(cmake.esc)
if project.iscpp(prj) then
p.generate(prj, ".cmake", cmake.project.generate)
end
end
-- Remove the generated workspace/project files on `premake5 clean`.
function cmake.cleanWorkspace(wks)
p.clean.file(wks, wks.name .. ".txt")
end
function cmake.cleanProject(prj)
p.clean.file(prj, prj.name .. ".cmake")
end
function cmake.cleanTarget(prj)
-- TODO..
end
-- Set dependence libs in same workspace
-- param modules : The table for dependences
-- param withPlatform : If contains any platform in this workspace or project
-- NOTE(review): pairs() iteration order is unspecified in Lua, so the link
-- order of the generated libraries is nondeterministic; ipairs would be
-- deterministic if `modules` is always an array — confirm with callers.
function cmake.linkmodules(modules, withPlatform)
local target_libs = {}
local plat = ''
if withPlatform then plat = '_%{cfg.platform}' end
for k, v in pairs(modules) do
table.insert(target_libs, v..'_%{cfg.buildcfg}'..plat)
end
links(target_libs)
end
-- Load the submodules that implement workspace/project/config emission.
include('_preload.lua')
include("cmake_workspace.lua")
include("cmake_project.lua")
include("cmake_configuration.lua")
return cmake

View File

@ -0,0 +1,148 @@
local p = premake
local tree = p.tree
local project = p.project
local config = p.config
local cmake = p.modules.cmake
cmake.config = {}
local m = cmake.config
m.elements = {}
-- Flags
function m.flags(cfg)
local cmakeflags = '-Wall'
local buildType = 'RelWithDebInfo'
if cfg.flags and #cfg.flags > 0 then
for _, flag in ipairs(cfg.flags) do
if flag == 'C++11' then
_p(1, 'set(CMAKE_CXX_STANDARD 11)')
elseif flag == 'C++14' then
_p(1, 'set(CMAKE_CXX_STANDARD 14)')
elseif flag == 'Symbols' then
buildType = 'DebugFull'
elseif flag == 'FatalWarnings' or flag == 'FatalCompileWarnings' then
cmakeflags = cmakeflags..' -Werror'
elseif flag == 'Unicode' then
_p(1,'add_definitions(-DUNICODE -D_UNICODE)')
end
end
end
_p(1, 'set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} %s")', cmakeflags)
_p(1, 'set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} %s")', cmakeflags)
_p(1, 'set(CMAKE_BUILD_TYPE %s)', buildType)
end
-- Emit the SRC list variable containing every source file of the
-- configuration, with paths made relative to the project directory.
function m.files(cfg)
	local sources = cfg.files
	if sources then
		_p('')
		_p(1, "set(SRC ")
		for _, abspath in ipairs(sources) do
			_p(2, project.getrelative(cfg.project, abspath))
		end
		_p(1, ")")
	end
end
-- Emit add_definitions(-D...) for the configuration's preprocessor defines.
-- Fix: the original condition was `cfg.defines and #cfg.defines` — in Lua
-- every number (including 0) is truthy, so an empty add_definitions() block
-- was emitted even when there were no defines; compare against 0 instead.
function m.defines(cfg)
	if cfg.defines and #cfg.defines > 0 then
		_p('')
		_p(1,'add_definitions(')
		for _, define in ipairs(cfg.defines) do
			_p(2, '-D%s', define)
		end
		_p(1,')')
	end
end
-- Emit include_directories() for the configuration's include paths,
-- made relative to the project directory.
-- Fix: the generated cmake variable was misspelled INCLUD_DIRS; both the
-- set() and the use are inside this function, so renaming is self-contained.
function m.includedirs(cfg)
	if cfg.includedirs and #cfg.includedirs > 0 then
		_p('')
		_p(1,'set(INCLUDE_DIRS ')
		for _, includedir in ipairs(cfg.includedirs) do
			local dirpath = project.getrelative(cfg.project, includedir)
			_p(2, dirpath)
		end
		_p(1,')')
		_p(1,'include_directories(${INCLUDE_DIRS})')
	end
end
-- Add executable / libs
-- Emits the add_library/add_executable call for the configuration's target,
-- chosen by the project kind. Unknown kinds are silently skipped (empty
-- else branch below).
function m.target(cfg)
local kind = cfg.project.kind
local targetname = cmake.targetname(cfg)
if kind == 'StaticLib' then
_p(1,'add_library( %s STATIC ${SRC})', targetname)
elseif kind == 'SharedLib' then
_p(1,'add_library( %s SHARED ${SRC})', targetname)
elseif kind == 'ConsoleApp' or kind == 'WindowedApp' then
_p(1,'add_executable( %s ${SRC})', targetname)
else
end
end
-- Set targets output properties
-- Points archive/library/runtime output at cfg.targetdir and sets the
-- output file name (cfg.targetname overrides cfg.filename when present).
function m.targetprops(cfg)
local targetname = cmake.targetname(cfg)
local filename = cfg.filename
if cfg.targetname then filename = cfg.targetname end
if cfg.targetdir and targetname and filename then
_p(1,'set_target_properties( %s ', targetname)
_p(2,'PROPERTIES')
_p(2,'ARCHIVE_OUTPUT_DIRECTORY "%s"', cfg.targetdir)
_p(2,'LIBRARY_OUTPUT_DIRECTORY "%s"', cfg.targetdir)
_p(2,'RUNTIME_OUTPUT_DIRECTORY "%s"', cfg.targetdir)
_p(2,'OUTPUT_NAME "%s"', filename)
_p(1,')')
end
end
-- Emit link_directories() for the configuration's library search paths,
-- made relative to the project directory.
function m.libdirs(cfg)
	if #cfg.libdirs == 0 then
		return
	end
	_p('')
	_p(1,'set(LIB_DIRS')
	local relative = project.getrelative(cfg.project, cfg.libdirs)
	for _, dirpath in ipairs(relative) do
		_p(2, dirpath)
	end
	_p(1,')')
	_p(1,'link_directories(${LIB_DIRS})')
end

-- Emit target_link_libraries() for the system libraries this
-- configuration links against.
function m.links(cfg)
	local syslibs = config.getlinks(cfg, "system", "fullpath")
	if syslibs == nil or #syslibs == 0 then
		return
	end
	_p('')
	_p(1, 'set(LIBS ')
	for _, libname in ipairs(syslibs) do
		_p(2, libname)
	end
	_p(1, ')')
	_p(1, 'target_link_libraries(%s ${LIBS})', cmake.targetname(cfg))
end
-- Generate Call array
-- Order matters: flags/defines/includes/libdirs/files must be emitted
-- before the target definition that consumes ${SRC}, and links last.
function m.elements.generate(cfg)
return {
m.flags,
m.defines,
m.includedirs,
m.libdirs,
m.files,
m.target,
m.targetprops,
m.links,
}
end
-- Emit the full body for one configuration by running the element list.
-- (prj is accepted but unused; callers pass it — see cmake_project.lua.)
function m.generate(prj, cfg)
p.callArray(m.elements.generate, cfg)
end

View File

@ -0,0 +1,57 @@
-- cmake_project.lua: wraps each configuration's emission in a cmake
-- function() and calls it, producing one <project>.cmake file.
local p = premake
local tree = p.tree
local project = p.project
local config = p.config
local cmake = p.modules.cmake
cmake.project = {}
local m = cmake.project
function cmake.getLinks(cfg)
-- System libraries are undecorated, add the required extension
return config.getlinks(cfg, "system", "fullpath")
end
function cmake.getSiblingLinks(cfg)
-- If we need sibling projects to be listed explicitly, add them on
return config.getlinks(cfg, "siblings", "fullpath")
end
m.elements = {}
-- Human-readable toolset labels (appear unused in this chunk — kept for
-- reference; confirm before removing).
m.ctools = {
gcc = "gnu gcc",
clang = "clang",
msc = "Visual C++",
}
m.cxxtools = {
gcc = "gnu g++",
clang = "clang++",
msc = "Visual C++",
}
-- NOTE(review): m.files is never defined on cmake.project (the files
-- emitter lives on cmake.config); this element table is only referenced
-- from the commented-out callArray below, so it is effectively dead.
m.elements.project = function(prj, cfg)
return {
m.files,
}
end
-- Project: Generate the cmake project file.
-- Emits one function(project_<target>) ... endfunction per configuration
-- and immediately invokes it, delegating the body to cmake.config.generate.
function m.generate(prj)
p.utf8()
for cfg in project.eachconfig(prj) do
local target = prj.name..'_'..cmake.cfgname(cfg)
local funcn = 'project_'..target
_p('function(%s)', funcn)
-- _p(1, cmake.cfgname(cfg)..'()')
-- p.callArray(m.elements.project, prj, cfg)
cmake.config.generate(prj,cfg)
_p('endfunction(%s)', funcn)
_p(funcn..'()')
_p('')
end
end

View File

@ -0,0 +1,32 @@
-- cmake_workspace.lua: emits the top-level CMakeLists.txt that includes
-- each project's generated .cmake file.
local p = premake
local project = p.project
local workspace = p.workspace
local tree = p.tree
local cmake = p.modules.cmake
cmake.workspace = {}
local m = cmake.workspace
-- Generate a cmake workspace
-- Writes the cmake preamble (project name, module path) and then walks the
-- workspace's group tree, include()-ing every leaf project.
function m.generate(wks)
p.utf8()
_p('# This file is generated by premake5')
_p('cmake_minimum_required(VERSION 3.1)')
_p('set(WORKSPACE_NAME "%s")', wks.name)
_p('project(${WORKSPACE_NAME})')
_p('set(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}")')
_p('')
local tr = workspace.grouptree(wks)
tree.traverse(tr, {
onleaf = function(n)
local prj = n.project
_p('include(%s)', prj.name)
end,
onbranch = function(n)
-- TODO: not sure what situation this appears...?
-- premake5.lua emit's one of these for 'contrib', which is a top-level folder with the zip projects
end,
})
end

View File

@ -0,0 +1,147 @@
-- export-compile-commands.lua: emits a JSON compilation database
-- (compile_commands.json) per configuration for clang-based tooling.
local p = premake
p.modules.export_compile_commands = {}
local m = p.modules.export_compile_commands
local workspace = p.workspace
local project = p.project
-- Classify the configuration's toolset: 'msc' for any msc* toolset,
-- 'gcc' for everything else.
function m.getToolsetName(cfg)
if string.sub(cfg.toolset,1,string.len('msc'))=='msc' then
return 'msc'
end
return 'gcc'
end
-- Always use gcc-style flag formatting, even for MSVC configs — clang
-- tooling consumes gcc-style flags (MSVC quirks are patched in via the
-- extra -fms-compatibility flags in generateCompileCommand).
function m.getToolset(cfg)
return p.tools['gcc']
end
-- Build -I / -isystem flags from the configuration's include directories.
-- Absolute paths are used deliberately (see m.getCommonFlags): some tools
-- that consume compile_commands.json mishandle relative include paths.
-- Fix: the sysincludedir loop inserted into the undefined global `result`,
-- which raised a runtime error whenever system include dirs were present;
-- it must accumulate into `flags` like the first loop.
function m.getIncludeDirs(cfg)
	local flags = {}
	for _, dir in ipairs(cfg.includedirs) do
		table.insert(flags, '-I' .. p.quoted(dir))
	end
	for _, dir in ipairs(cfg.sysincludedir or {}) do
		table.insert(flags, '-isystem ' .. p.quoted(dir))
	end
	return flags
end
-- Collect the full compiler flag list for a configuration: preprocessor,
-- C++ flags, defines/undefines, include dirs, C flags, then buildoptions.
function m.getCommonFlags(cfg)
local toolset = m.getToolset(cfg)
local flags = toolset.getcppflags(cfg)
flags = table.join(flags, toolset.getcxxflags(cfg))
flags = table.join(flags, toolset.getdefines(cfg.defines))
flags = table.join(flags, toolset.getundefines(cfg.undefines))
-- can't use toolset.getincludedirs because some tools that consume
-- compile_commands.json have problems with relative include paths
flags = table.join(flags, m.getIncludeDirs(cfg))
flags = table.join(flags, toolset.getcflags(cfg))
return table.join(flags, cfg.buildoptions)
end
-- Object (.o) path a build of this node would produce, under cfg.objdir.
function m.getObjectPath(prj, cfg, node)
return path.join(cfg.objdir, path.appendExtension(node.objname, '.o'))
end
-- Dependency (.d) file path for this node, under cfg.objdir.
function m.getDependenciesPath(prj, cfg, node)
return path.join(cfg.objdir, path.appendExtension(node.objname, '.d'))
end
-- Per-file argv tail: common flags plus -o/-MF outputs and the source file.
function m.getFileFlags(prj, cfg, node)
return table.join(m.getCommonFlags(cfg), {
'-o', m.getObjectPath(prj, cfg, node),
'-MF', m.getDependenciesPath(prj, cfg, node),
'-c', node.abspath
})
end
-- Produce one compilation-database entry (directory/file/command) for a
-- source file node.
-- Fix: `extra` was assigned without `local`, leaking a global variable;
-- behavior is otherwise unchanged.
function m.generateCompileCommand(prj, cfg, node)
	local extra = ''
	if m.getToolsetName(cfg) == 'msc' then
		-- help clang-based tools digest MSVC-flavoured sources
		extra = extra .. "-fms-compatibility -fdelayed-template-parsing "
	end
	return {
		directory = prj.location,
		file = node.abspath,
		command = 'cc '.. extra .. table.concat(m.getFileFlags(prj, cfg, node), ' ')
	}
end
-- Only C/C++ source files get database entries.
function m.includeFile(prj, node, depth)
return path.iscppfile(node.abspath)
end
-- Pick the configuration to export: honour the command-line options if
-- given, otherwise the project's first configuration.
-- NOTE(review): execute() below iterates every configuration itself and
-- never calls this — appears unused in this chunk; confirm before removal.
function m.getConfig(prj)
if _OPTIONS['export-compile-commands-config'] then
return project.getconfig(prj, _OPTIONS['export-compile-commands-config'],
_OPTIONS['export-compile-commands-platform'])
end
for cfg in project.eachconfig(prj) do
-- just use the first configuration which is usually "Debug"
return cfg
end
end
-- Walk the project's source tree and build one compile command per
-- included source file.
function m.getProjectCommands(prj, cfg)
local tr = project.getsourcetree(prj)
local cmds = {}
p.tree.traverse(tr, {
onleaf = function(node, depth)
if not m.includeFile(prj, node, depth) then
return
end
table.insert(cmds, m.generateCompileCommand(prj, cfg, node))
end
})
return cmds
end
-- Action entry point: for every workspace, gather all projects' compile
-- commands per configuration shortname and write one
-- compile_commands/<shortname>.json file each.
local function execute()
for wks in p.global.eachWorkspace() do
local cfgCmds = {}
for prj in workspace.eachproject(wks) do
for cfg in project.eachconfig(prj) do
local cfgKey = string.format('%s', cfg.shortname)
if not cfgCmds[cfgKey] then
cfgCmds[cfgKey] = {}
end
cfgCmds[cfgKey] = table.join(cfgCmds[cfgKey], m.getProjectCommands(prj, cfg))
end
end
for cfgKey,cmds in pairs(cfgCmds) do
local outfile = string.format('compile_commands/%s.json', cfgKey)
p.generate(wks, outfile, function(wks)
p.w('[')
for i = 1, #cmds do
local item = cmds[i]
-- NOTE(review): only `command` is escaped below; `directory` and
-- `file` are interpolated raw, so Windows backslash paths would
-- produce invalid JSON — confirm whether that case can occur.
local command = string.format([[
{
"directory": "%s",
"file": "%s",
"command": "%s"
}]],
item.directory,
item.file,
item.command:gsub('\\', '\\\\'):gsub('"', '\\"'))
if i > 1 then
p.w(',')
end
p.w(command)
end
p.w(']')
end)
end
end
end
-- Register the premake action that triggers the export.
newaction {
trigger = 'export-compile-commands',
description = 'Export compiler commands in JSON Compilation Database Format',
execute = execute
}
return m

View File

@ -0,0 +1,42 @@
# Premake GitHub packages
An extension to premake for consuming packages from GitHub repositories.
This extension makes it easy to share and consume C/C++ projects!
Import this extension by placing it somewhere that premake can find it, then use:
```lua
require 'ghp'
```
Import packages using the `ghp.import` function in your workspace which refers to a GitHub organization/repository and release.
```lua
ghp.import('mversluys/protobuf', '2.6.1')
```
Pull include directories and libraries into your projects with `ghp.use`.
```lua
ghp.use('mversluys/protobuf')
```
You can also require premake modules directly from github with `ghp.require`.
```lua
ghp.require('mversluys/premake-autoconf', '0.1')
```
For more information, including how to publish your own packages, see the [wiki](https://github.com/mversluys/premake-ghp/wiki).
For a working sample project, see [premake-ghp-sample](https://github.com/mversluys/premake-ghp-sample).
A public index of available packages can be found here http://www.premake-ghp.com.
## Acknowledgements
This extension utilizes the following Lua packages
* [lunajson](https://github.com/grafi-tt/lunajson) -- JSON parser/encoder/decoder
* [semver](https://github.com/kikito/semver.lua) -- Semantic Versioning

View File

@ -0,0 +1,17 @@
---
-- github-package/_manifest.lua
-- Premake package management extension
-- Copyright (c) 2015 Matthew Versluys
---
-- Module manifest: files that make up the ghp module.
-- NOTE(review): the directory is spelled 'lunajason' here while the module
-- is required as 'lunajson' in ghp.lua — confirm the on-disk name matches.
return {
'_preload.lua',
'ghp.lua',
'lunajson.lua',
'semver.lua',
'lunajason/_str_lib.lua',
'lunajason/_str_lib_lua53.lua',
'lunajason/decoder.lua',
'lunajason/encoder.lua',
'lunajason/sax.lua',
}

View File

@ -0,0 +1,11 @@
---
-- github-package/_preload.lua
-- Premake package management extension
-- Copyright (c) 2015 Matthew Versluys
---
-- Load the module implementation immediately rather than lazily.
include('ghp.lua')
-- Premake calls this predicate to decide whether the module should stay
-- loaded; returning true unconditionally keeps ghp active for every run.
return function(cfg)
    return true
end

743
premake_modules/ghp/ghp.lua Normal file
View File

@ -0,0 +1,743 @@
---
-- ghp/ghp.lua
-- Premake GitHub package management extension
-- Copyright (c) 2015 Matthew Versluys
---
print 'GitHub Package module ... (ghp)'
-- NOTE(review): json, semver and ghp are assigned as globals (no `local`),
-- presumably so package scripts run via dofile can reach them -- confirm.
json = require 'lunajson'
semver = require 'semver'
premake.modules.ghp = {}
ghp = premake.modules.ghp
ghp._VERSION = "0.8.0"
-- Command line options understood by this module.
newoption {
    trigger = "ghp-api",
    value = "URL",
    description = "The URL of the GitHub API to use. Change to retrieve from GitHub enterprise."
}
newoption {
    trigger = "ghp-cache",
    value = "DIRECTORY",
    description = "Directory to use for the package cache."
}
newoption {
    trigger = "ghp-environment",
    value = "FILE",
    description = "File to write environment variables into."
}
newoption {
    trigger = "ghp-index",
    value = "URL",
    description = "The url of the premake-ghp index. Change to use an internal index."
}
newoption {
    trigger = "ghp-user",
    value = "USERNAME[:PASSWORD]",
    description = "The user name and optional password used to retrieve packages from GitHub"
}
-- Module state; the nil fields are resolved lazily by the _get_* helpers.
ghp.packages = {}               -- name -> imported package record
ghp.current = nil               -- package whose premake5-ghp.lua is executing
ghp.api = nil                   -- resolved GitHub API base URL
ghp.cache = nil                 -- resolved package cache directory
ghp.environment = nil           -- environment file name
ghp.environment_file = nil      -- open handle to the environment file
ghp.index = nil                 -- resolved package-index URL
ghp.user = nil                  -- "user:password" credentials
ghp.consumer_organization = nil
ghp.consumer_repository = nil
ghp.local_packages = { 'ghp_local' }  -- folders searched before the cache
-- Normalize ghp.local_packages into a list: a lone string becomes a
-- single-element table, anything else is handed back untouched.
local function _local_packages()
    local folders = ghp.local_packages
    if type(folders) ~= 'string' then
        return folders
    end
    return { folders }
end
-- Resolve the GitHub API base URL and cache it in ghp.api.
-- Precedence: --ghp-api command line option, then the GHP_API environment
-- variable, then the public GitHub API.
local function _get_api()
    if ghp.api then
        return ghp.api
    end
    -- check for command line
    if _OPTIONS['ghp-api'] then
        ghp.api = _OPTIONS['ghp-api']
    else
        -- check for environment variable
        local env = os.getenv('GHP_API')
        if env then
            ghp.api = env
        else
            -- use default url
            ghp.api = 'https://api.github.com'
        end
    end
    -- BUG FIX: the environment-variable branch used to return early,
    -- skipping this diagnostic; now every resolution path is logged.
    verbosef(' API URL %s', ghp.api)
    return ghp.api
end
-- Resolve the directory used to cache downloaded packages, caching the
-- answer in ghp.cache.  Precedence: --ghp-cache option, GHP_CACHE
-- environment variable, then a platform default.
local function _get_cache()
    if ghp.cache then
        return ghp.cache
    end
    local location = _OPTIONS['ghp-cache'] or os.getenv('GHP_CACHE')
    if not location then
        if os.get() == 'windows' then
            -- prefer a ghp_cache folder under %TEMP%, fall back to c:\temp
            local temp = os.getenv('TEMP')
            if temp then
                location = path.join(temp, 'ghp_cache')
            else
                location = 'c:\\temp'
            end
        else
            -- assume that we're on something that's using a standard file system heirachy
            location = '/var/tmp/ghp_cache'
        end
    end
    ghp.cache = location
    verbosef(' CACHE LOCATION: %s', ghp.cache)
    return ghp.cache
end
-- Resolve "user:password" credentials for the GitHub API, caching the
-- result in ghp.user.  Precedence: --ghp-user option, GHP_USER environment
-- variable, then an interactive prompt.
local function _get_user()
    if ghp.user then
        return ghp.user
    end
    local user = nil
    -- check for command line
    if _OPTIONS['ghp-user'] then
        user = _OPTIONS['ghp-user']
    else
        -- check for environment variable
        user = os.getenv('GHP_USER')
    end
    if not user then
        print('Authentication to GitHub api ' .. _get_api() .. ' required')
        -- guess a default username from the shell environment
        -- (USER on unix-likes, USERNAME on windows)
        local guess = os.getenv('USER')
        if not guess then
            guess = os.getenv('USERNAME')
        end
        if guess then
            io.write('Enter username [' .. guess .. ']: ')
        else
            io.write('Enter username: ')
        end
        user = io.read()
        -- empty input accepts the guessed default
        if user == '' then
            user = guess
        end
    end
    -- if no inline password ("user:pass") was supplied, prompt for one
    if user:find(':') then
        ghp.user = user
    else
        ghp.user = user .. ':' .. os.getpass('Enter password for user "' .. user .. '": ')
    end
    return ghp.user
end
-- Open (once) the file that receives GHP_* environment variable lines and
-- return its handle; returns nil when no environment file is configured.
local function _get_environment()
    -- reuse an already-open handle
    if ghp.environment_file then
        return ghp.environment_file
    end
    -- precedence: command line, environment variable, then any filename
    -- previously stored on ghp.environment
    local filename = _OPTIONS['ghp-environment']
        or os.getenv('GHP_ENVIRONMENT')
        or ghp.environment
    if filename then
        verbosef(' ENVIRONMENT FILE %s', filename)
        ghp.environment = filename
        ghp.environment_file = io.open(filename, 'w')
    end
    return ghp.environment_file
end
-- Resolve the premake-ghp index URL and cache it in ghp.index.
-- Precedence: --ghp-index option, GHP_INDEX environment variable, then the
-- public index.
local function _get_index()
    if ghp.index then
        return ghp.index
    end
    -- check for command line
    if _OPTIONS['ghp-index'] then
        ghp.index = _OPTIONS['ghp-index']
    else
        -- check for environment variable
        local env = os.getenv('GHP_INDEX')
        if env then
            ghp.index = env
        else
            -- use default url
            ghp.index = 'http://www.premake-ghp.com'
        end
    end
    -- BUG FIX: the environment-variable branch used to return early,
    -- skipping this diagnostic; now every resolution path is logged.
    verbosef(' INDEX URL %s', ghp.index)
    return ghp.index
end
-- Progress callback for http.get/http.download: draws a 78-column ASCII
-- progress bar on one line, and blanks it when the transfer completes.
local function _http_progress(total, current)
    local width = 78
    -- BUG FIX: guard against a zero/unknown total (e.g. no Content-Length
    -- header); dividing by zero produced inf/nan and broke string.rep below.
    if not total or total <= 0 then
        return
    end
    local progress = math.floor(current * width / total)
    if progress == width then
        -- transfer done: overwrite the bar with spaces
        io.write(string.rep(' ', width + 2) .. '\r')
    else
        io.write('[' .. string.rep('=', progress) .. string.rep(' ', width - progress) .. ']\r')
    end
end
-- GET `url`, retrying once with user credentials on a 401; any other
-- failure is fatal.  `context` labels the error message.
local function _http_get(url, context)
    local options = { progress = _http_progress }
    local result, result_str, result_code = http.get(url, options)
    if result_code == 401 then
        -- authentication required: retry with credentials
        options.userpwd = _get_user()
        result, result_str, result_code = http.get(url, options)
    end
    if not result then
        premake.error('%s retrieval of %s failed (%d)\n%s', context, url, result_code, result_str)
    end
    return result
end
-- Download `url` to `destination`, retrying once with user credentials on
-- a 401; any other failure is fatal.  The octet-stream Accept header asks
-- GitHub for the raw asset bytes rather than its JSON description.
local function _http_download(url, destination, context)
    local options = {
        progress = _http_progress,
        headers = { 'Accept: application/octet-stream' },
    }
    local result_str, result_code = http.download(url, destination, options)
    if result_code == 401 then
        options.userpwd = _get_user()
        result_str, result_code = http.download(url, destination, options)
    end
    if result_str ~= "OK" then
        premake.error('%s retrieval of %s failed (%d)\n%s', context, url, result_code, result_str)
    end
end
-- Best-effort usage ping to the package index; silently skipped when the
-- consumer identity has not been declared via ghp.consumer.
local function _index_use(organization, repository, release, cached)
    if not (ghp.consumer_organization and ghp.consumer_repository) then
        return
    end
    local suffix = cached and '&cached=1' or ''
    local url = _get_index() .. '/api/use/' ..
        organization .. '/' .. repository .. '/' .. release ..
        '?consumer=' .. ghp.consumer_organization .. '/' .. ghp.consumer_repository ..
        suffix
    verbosef(' INDEX USE: %s', url)
    -- short timeout: reporting must never stall a build for long
    http.get(url, { timeout = 5 })
end
-- Locate a release of organization/repository, preferring (in order) a
-- local package folder, the on-disk cache, and finally a fresh download
-- from the GitHub releases API.  Returns the directory holding the
-- unpacked release.  `context` labels error messages.
local function _download_release(organization, repository, release, context)
    local p = path.normalize(path.join(organization, repository, release, 'release'))
    -- see if the file exists locally
    for _, folder in ipairs(_local_packages()) do
        local location = path.join(folder, p)
        if os.isdir(location) then
            verbosef(' LOCAL: %s', location)
            return location
        end
    end
    -- see if it's cached
    local location = path.join(_get_cache(), p)
    if os.isdir(location) then
        verbosef(' CACHED: %s', location)
        _index_use(organization, repository, release, true)
        return location
    end
    -- try to download it: ask the API for the release, then fetch its zipball
    local api_url = _get_api() .. '/repos/' .. organization .. '/' .. repository .. '/releases/tags/' .. release
    printf(' INFO: %s', api_url)
    local release_json = _http_get(api_url, context)
    local source = json.decode(release_json)['zipball_url']
    local destination = location .. '.zip'
    printf(' DOWNLOAD: %s', source)
    os.mkdir(path.getdirectory(destination))
    _http_download(source, destination, context)
    -- unzip it
    verbosef(' UNZIP: %s', destination)
    zip.extract(destination, location)
    -- GitHub puts an extra folder in the archive, if we can find it, let's remove it
    -- TODO: figure out how to request the archive from GitHub without the extra folder
    local cruft = os.matchdirs(path.join(location, organization .. '-' .. repository .. '-*'))
    if #cruft == 1 then
        local cruft_path = cruft[1]
        -- what we want to do is rename cruft_path to location
        -- because it's inside of location we need to move it out of location
        verbosef(' CLEANING: %s', cruft_path)
        os.rename(cruft_path, location .. '-temp')
        -- remove the old location
        os.rmdir(location)
        -- then replace it with the new one
        os.rename(location .. '-temp', location)
    end
    -- remove the downloaded file
    os.remove(destination)
    _index_use(organization, repository, release, false)
    return location
end
-- Locate a single release asset, preferring a local package folder, then
-- the cache, then a download via the GitHub releases API.  Assets with a
-- .zip extension are unpacked into a directory named after the archive;
-- returns the path to the file (or unpacked directory).
local function _download_asset(organization, repository, release, asset, context)
    local f = asset
    local p = path.normalize(path.join(organization, repository, release, 'assets', f))
    -- d / check describe what to look for: a file, or a directory for zips
    local d = p
    local check = os.isfile
    -- is this a zip file?
    if path.getextension(f) == '.zip' then
        f = path.getbasename(f)
        d = path.normalize(path.join(organization, repository, release, 'assets', f))
        check = os.isdir
    end
    -- see if it the file exists locally
    for _, folder in ipairs(_local_packages()) do
        local location = path.join(folder, d)
        if check(location) then
            verbosef(' LOCAL: %s', location)
            return location
        end
    end
    -- see if it's cached
    local location = path.join(_get_cache(), d)
    if check(location) then
        verbosef(' CACHED: %s', location)
        return location
    end
    -- try to download it: resolve the asset name to its numeric id first
    local api_url = _get_api() .. '/repos/' .. organization .. '/' .. repository .. '/releases/tags/' .. release
    printf(' INFO: %s', api_url)
    local release_json = _http_get(api_url, context)
    local release_info = json.decode(release_json)
    local asset_id = nil
    for _, asset_info in ipairs(release_info['assets']) do
        if asset_info['name'] == asset then
            asset_id = asset_info['id']
            break
        end
    end
    if not asset_id then
        premake.error('%s unable to find asset named %s', context, asset)
    end
    local asset_url = _get_api() .. '/repos/' .. organization .. '/' .. repository .. '/releases/assets/' .. asset_id
    -- try to download it
    -- (for non-zip assets `destination` and `location` are the same path)
    local destination = path.join(_get_cache(), p)
    printf(' DOWNLOAD: %s', asset_url)
    os.mkdir(path.getdirectory(destination))
    _http_download(asset_url, destination, context)
    -- if it's a zip, unzip it and discard the archive
    if path.getextension(asset) == '.zip' then
        verbosef(' UNZIP: %s %s', destination, location)
        zip.extract(destination, location)
        os.remove(destination)
    end
    return location
end
-- Record entries on a package export list `to`.  Each entry stores the
-- label, the premake filter active at export time (replayed on import),
-- and the path (made absolute when `isrelative` is set).
local function _export(to, to_name, paths, label, isrelative)
    -- accept either a single path or a list of paths
    local list = paths
    if type(list) ~= 'table' then
        list = { list }
    end
    -- capture the current premake filter
    local filter = premake.configset.getFilter(premake.api.scope.current)
    for _, entry in ipairs(list) do
        local p = entry
        if isrelative then
            p = path.getabsolute(p)
        end
        verbosef(' EXPORT: %s %s', to_name, p)
        to[#to + 1] = { label, filter, p }
    end
end
-- Decide whether `label` passes `label_filter`: no filter matches
-- everything, a table filter matches by membership, and anything else
-- must compare equal.
local function _label_test(label, label_filter)
    if not label_filter then
        return true
    end
    if type(label_filter) == 'table' then
        for _, candidate in ipairs(label_filter) do
            if candidate == label then
                return true
            end
        end
        -- fall through: a table never compares equal to a label below
    end
    return label_filter == label
end
-- Replay a package's exported entries into the current premake scope.
-- `func_name` selects the export list on the package (e.g. 'includedirs')
-- and `func` is the premake API function to invoke with each path.
local function _import(package, label_filter, func, func_name)
    -- preserve the current premake filter
    local filter = premake.configset.getFilter(premake.api.scope.current)
    -- resolve the package
    for _, i in ipairs(package[func_name]) do
        -- each entry i is { label, filter-at-export, path }
        if _label_test(i[1], label_filter) then
            verbosef(' IMPORT: %s %s %s', func_name, package.name, i[3])
            -- apply the filter that was captured at export
            premake.configset.setFilter(premake.api.scope.current, i[2])
            -- call the function with the parameter that was captured at export
            func { i[3] }
        end
    end
    -- restore the current premake filter
    premake.configset.setFilter(premake.api.scope.current, filter)
end
-- functions used inside of premake5-ghp.lua
-- Export include directories from inside a package's premake5-ghp.lua;
-- paths are recorded as absolute so consumers can use them from anywhere.
function ghp.export_includedirs(paths, label)
    if ghp.current == nil then
        premake.error('ghp.export_includedirs can only be used inside of packages')
    end
    _export(ghp.current.includedirs, 'includedirs', paths, label, true)
end
-- libdirs shouldn't be neccesary, all exported library references "should" be absolute
--function package_export_libdirs(paths, label)
-- if not ghp.current then
-- premake.error('ghp.export_includedirs can only be used inside of packages')
-- end
-- _export(ghp.current.libdirs, 'libdirs', paths, label, true)
--end
-- Export prebuilt library files from inside a package's premake5-ghp.lua;
-- paths are recorded as absolute so consumers can link them from anywhere.
function ghp.export_library(paths, label)
    if not ghp.current then
        -- BUG FIX: the message previously named ghp.export_includedirs
        -- (copy-paste error)
        premake.error('ghp.export_library can only be used inside of packages')
    end
    _export(ghp.current.links, 'links', paths, label, true)
end
-- Export project names from inside a package's premake5-ghp.lua; project
-- names are symbolic rather than paths, so they are recorded verbatim.
function ghp.export_project(paths, label)
    if ghp.current == nil then
        premake.error('ghp.export_project can only be used inside of packages')
    end
    _export(ghp.current.links, 'links', paths, label, false)
end
-- Fetch a named release asset for the package currently being imported and
-- return its local path.  `name` defaults to the asset filename (minus the
-- extension for zips) and keys the GHP_* environment variable line below.
function ghp.asset(file, name)
    if not ghp.current then
        premake.error('ghp.asset can only be used inside of packages')
    end
    if not name then
        if path.getextension(file) == '.zip' then
            name = path.getbasename(file)
        else
            name = file
        end
    end
    verbosef(' ASSET: %s %s', name, file)
    local package = ghp.current
    local context = string.format('ghp.asset %s %s %s', package.name, package.release, file)
    local asset_path = _download_asset(package.organization, package.repository, package.release, file, context)
    -- add to the environment file (GHP_<ORG>_<REPO>_<NAME>="<abs path>")
    local env = _get_environment()
    if env then
        env:write(
            'GHP_' .. string.upper(package.organization) ..
            '_' .. string.upper(package.repository) ..
            '_' .. string.upper(name) ..
            '="' .. path.getabsolute(asset_path) .. '"\n')
    end
    return asset_path
end
-- functions used by consumers of packages
-- Apply a previously imported package's exported include directories to
-- the current premake scope.
function ghp.includedirs(package_name, label_filter)
    local package = ghp.packages[package_name]
    if not package then
        premake.error('ghp.includedirs could not resolve package name %s', package_name)
    end
    _import(package, label_filter, includedirs, 'includedirs')
end
-- Apply a previously imported package's exported link entries to the
-- current premake scope.
function ghp.links(package_name, label_filter)
    local package = ghp.packages[package_name]
    if not package then
        premake.error('ghp.links could not resolve package name %s', package_name)
    end
    _import(package, label_filter, links, 'links')
end
-- Convenience wrapper: apply both a package's include directories and its
-- link entries to the current premake scope.
function ghp.use(package_name, label_filter)
    local package = ghp.packages[package_name]
    if not package then
        premake.error('ghp.use could not resolve package name %s', package_name)
    end
    _import(package, label_filter, includedirs, 'includedirs')
    _import(package, label_filter, links, 'links')
end
-- specify who is using packages
-- Declare who is consuming packages ("organization/repository"); used to
-- identify this workspace in usage pings to the package index.
function ghp.consumer(name)
    if ghp.current then
        premake.error('ghp.consumer can not be used inside of packages, currently in package %s', ghp.current.name)
    end
    -- the name should contain the organization and repository
    local organization, repository = name:match('(%S+)/(%S+)')
    if not (organization and repository) then
        premake.error('ghp.consumer expected name to contain organization/repository but found %s', name)
    end
    ghp.consumer_organization = organization
    ghp.consumer_repository = repository
end
-- load a premake module given a name and release
-- Load a premake module published as a GitHub release.  The repository
-- name must follow the premake-{module} convention; the release is fetched
-- (or served from cache) and require'd from that directory.
function ghp.require(name, release, versions)
    -- the name should contain the organization and repository
    local organization, repository = name:match('(%S+)/(%S+)')
    if not organization or not repository then
        premake.error('ghp.require expected name to contain organization/repository but found %s', name)
    end
    -- the last part of the name is the module name
    local module_name = name:match('premake%-(%S+)$')
    if not module_name then
        premake.error('ghp.require expected name %s to end in premake-{name}', name)
    end
    local context = string.format('ghp.require %s %s', name, release)
    printf('%s (%s)', context, module_name)
    -- download the module
    local directory = _download_release(organization, repository, release, context)
    -- push the current path
    -- (note: this local shadows the global `path` module for the rest of
    -- this function; no path.* helpers are used below, so that is harmless)
    local path = premake.path
    -- set the path to the inside of the cache and load the module
    premake.path = directory
    require(module_name, versions)
    -- pop the previous path
    premake.path = path
end
-- import a package given a name and release
-- Import a package: download the release, run its premake5-ghp.lua (if
-- present) to collect exports, and register it in ghp.packages.
function ghp.import(name, release)
    if ghp.current then
        premake.error('ghp.import can not be used inside of packages, currently in package %s', ghp.current.name)
    end
    -- has this package already been imported?
    if ghp.packages[name] then
        premake.error('ghp.import package %s has already been imported', name)
    end
    -- the name should contain the organization and repository
    local organization, repository = name:match('(%S+)/(%S+)')
    if not organization or not repository then
        premake.error('ghp.import expected name to contain organization/repository but found %s', name)
    end
    -- version is the numerical and dotted part of the release
    -- BUG FIX: the character class was '[%d\\\.]', which also admitted a
    -- stray backslash; only digits and dots belong in a version
    -- NOTE(review): the match is nil for tags with no dotted numeric part,
    -- which would make the printf below fail -- confirm tags are versioned
    local version = release:match('(%d[%d%.]+%d)')
    local context = string.format('ghp.import %s %s', name, release)
    printf('%s (%s)', context, version)
    local directory = _download_release(organization, repository, release, context)
    -- create the package record
    -- BUG FIX: dropped the old `revision = revision` field, which read an
    -- undefined global and was therefore always nil
    local package = {
        name = name,
        organization = organization,
        repository = repository,
        release = release,
        version = version,
        location = nil,
        includedirs = {},
        links = {},
        libdirs = {},
    }
    -- add to the environment file (GHP_<ORG>_<REPO>="<abs path>")
    local env = _get_environment()
    if env then
        env:write(
            'GHP_' .. string.upper(organization) ..
            '_' .. string.upper(repository) ..
            '="' .. path.getabsolute(directory) .. '"\n')
    end
    -- expose the package while its premake5-ghp.lua runs so the
    -- ghp.export_* and ghp.asset functions know their target
    ghp.current = package
    -- look for the premake package file
    local path_premake = path.join(directory, 'premake5-ghp.lua')
    if os.isfile(path_premake) then
        package.func = dofile(path_premake)
    end
    ghp.current = nil
    -- save in the package registry
    ghp.packages[name] = package
end
-- declare a dependency on another package having been imported
-- Declare that the package currently being imported depends on another
-- already-imported package.  `version` is an operator followed by a
-- semver string:
--   '=x.y.z'  exact match
--   '>x.y.z'  strictly newer than x.y.z
--   '^x.y.z'  pessimistic/compatible upgrade (semver's ^ operator)
-- Any mismatch or unknown operator is a fatal premake error.
function ghp.dependency(name, version)
    if not ghp.current then
        premake.error('ghp.dependency can only be used inside of packages')
    end
    -- split "<op><semver>"; this local deliberately shadows the parameter
    local operator, version = version:match('(.)(.+)')
    verbosef(' DEPENDENCY: %s %s%s', name, operator, version)
    -- do we have this package?
    local package = ghp.packages[name]
    if not package then
        premake.error('ghp.dependency package %s depends on %s %s %s package not found', ghp.current.name, name, operator, version)
    end
    local current_version = semver(package.version)
    local compare_version = semver(version)
    if operator == '=' then
        -- looking for an exact match
        if compare_version == current_version then
            return
        else
            premake.error('ghp.dependency package %s depends on %s =%s found version %s', ghp.current.name, name, version, package.version)
        end
    end
    if operator == '>' then
        -- looking for a comparison
        if compare_version < current_version then
            return
        else
            premake.error('ghp.dependency package %s depends on %s >%s found version %s', ghp.current.name, name, version, package.version)
        end
    end
    if operator == '^' then
        -- looking for a pessimistic upgrade
        if compare_version ^ current_version then
            return
        else
            premake.error('ghp.dependency package %s depends on %s ^%s found version %s', ghp.current.name, name, version, package.version)
        end
    end
    premake.error('ghp.dependency package %s has unknown comparison operator %s when depending on %s %s', ghp.current.name, operator, name, version)
end
---
-- override 'project' so that when a package defines a new project we initialize it with some default values.
---
-- NOTE(review): this override applies to every project created while the
-- module is loaded, not only to projects defined by packages -- confirm
-- that grouping everything under 'ghp' is the intent.
premake.override(premake.project, 'new', function(base, name)
    local project = base(name)
    -- place the project in a group named ghp
    project.group = 'ghp'
    return project
end)
-- hand the module table back to premake's require machinery
return ghp

View File

@ -0,0 +1,11 @@
-- Assemble the public lunajson interface from its submodules.
local newdecoder = require 'lunajson.decoder'
local newencoder = require 'lunajson.encoder'
local sax = require 'lunajson.sax'
-- If you need multiple contexts of decoder and/or encoder,
-- you can require lunajson.decoder and/or lunajson.encoder directly.
local lunajson = {
    decode = newdecoder(),
    encode = newencoder(),
    newparser = sax.newparser,
    newfileparser = sax.newfileparser,
}
return lunajson

View File

@ -0,0 +1,86 @@
local inf = math.huge
local byte, char, sub = string.byte, string.char, string.sub
local setmetatable = setmetatable
local floor = math.floor
local _ENV = nil
-- Map ASCII codes (offset by 47) of '0'-'9', 'A'-'F' and 'a'-'f' to their
-- hex digit values; any other byte reads as inf via the metatable, so a
-- bad digit is detected with one comparison after the arithmetic below.
local hextbl = {
    0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, inf, inf, inf, inf, inf, inf,
    inf, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF, inf, inf, inf, inf, inf, inf, inf, inf, inf,
    inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf,
    inf, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF, inf, inf, inf, inf, inf, inf, inf, inf, inf,
}
hextbl.__index = function()
    return inf
end
setmetatable(hextbl, hextbl)
-- Factory building the decoder's string-escape machinery (Lua 5.1/5.2
-- variant: float multiplication + floor stand in for bit shifts).
-- `myerror` is the decoder's error reporter.
return function(myerror)
    local escapetbl = {
        ['"'] = '"',
        ['\\'] = '\\',
        ['/'] = '/',
        ['b'] = '\b',
        ['f'] = '\f',
        ['n'] = '\n',
        ['r'] = '\r',
        ['t'] = '\t'
    }
    escapetbl.__index = function()
        myerror("invalid escape sequence")
    end
    setmetatable(escapetbl, escapetbl)
    -- holds the high half of a \uXXXX surrogate pair between calls
    local surrogateprev = 0
    -- gsub callback: `ch` is the escaped character, `rest` the remainder of
    -- the matched run; returns the replacement text
    local function subst(ch, rest)
        -- 0.000003814697265625 = 2^-18
        -- 0.000244140625 = 2^-12
        -- 0.015625 = 2^-6
        local u8
        if ch == 'u' then
            local c1, c2, c3, c4 = byte(rest, 1, 4)
            local ucode = hextbl[c1-47] * 0x1000 + hextbl[c2-47] * 0x100 + hextbl[c3-47] * 0x10 + hextbl[c4-47]
            if ucode == inf then
                myerror("invalid unicode charcode")
            end
            rest = sub(rest, 5)
            if ucode < 0x80 then -- 1byte
                u8 = char(ucode)
            elseif ucode < 0x800 then -- 2byte
                u8 = char(0xC0 + floor(ucode * 0.015625), 0x80 + ucode % 0x40)
            elseif ucode < 0xD800 or 0xE000 <= ucode then -- 3byte
                u8 = char(0xE0 + floor(ucode * 0.000244140625), 0x80 + floor(ucode * 0.015625) % 0x40, 0x80 + ucode % 0x40)
            elseif 0xD800 <= ucode and ucode < 0xDC00 then -- surrogate pair 1st
                if surrogateprev == 0 then
                    surrogateprev = ucode
                    if rest == '' then
                        return ''
                    end
                end
            else -- surrogate pair 2nd
                if surrogateprev == 0 then
                    -- low surrogate without a high one: poison the state so
                    -- the check below reports the error
                    surrogateprev = 1
                else
                    ucode = 0x10000 + (surrogateprev - 0xD800) * 0x400 + (ucode - 0xDC00)
                    surrogateprev = 0
                    u8 = char(0xF0 + floor(ucode * 0.000003814697265625), 0x80 + floor(ucode * 0.000244140625) % 0x40, 0x80 + floor(ucode * 0.015625) % 0x40, 0x80 + ucode % 0x40)
                end
            end
        end
        if surrogateprev ~= 0 then
            myerror("invalid surrogate pair")
        end
        return (u8 or escapetbl[ch]) .. rest
    end
    -- true when no half-consumed surrogate pair is pending
    local function surrogateok()
        return surrogateprev == 0
    end
    return {
        subst = subst,
        surrogateok = surrogateok
    }
end

View File

@ -0,0 +1,83 @@
local inf = math.huge
local byte, char, sub = string.byte, string.char, string.sub
local setmetatable = setmetatable
local _ENV = nil
-- Map ASCII codes (offset by 47) of '0'-'9', 'A'-'F' and 'a'-'f' to their
-- hex digit values; any other byte reads as inf via the metatable, so a
-- bad digit is detected with one comparison after the arithmetic below.
local hextbl = {
    0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, inf, inf, inf, inf, inf, inf,
    inf, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF, inf, inf, inf, inf, inf, inf, inf, inf, inf,
    inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf,
    inf, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF, inf, inf, inf, inf, inf, inf, inf, inf, inf,
}
hextbl.__index = function()
    return inf
end
setmetatable(hextbl, hextbl)
-- Factory building the decoder's string-escape machinery (Lua 5.3 variant:
-- uses native bitwise operators).  `myerror` is the decoder's error reporter.
return function(myerror)
    local escapetbl = {
        ['"'] = '"',
        ['\\'] = '\\',
        ['/'] = '/',
        ['b'] = '\b',
        ['f'] = '\f',
        ['n'] = '\n',
        ['r'] = '\r',
        ['t'] = '\t'
    }
    escapetbl.__index = function()
        myerror("invalid escape sequence")
    end
    setmetatable(escapetbl, escapetbl)
    -- holds the high half of a \uXXXX surrogate pair between calls
    local surrogateprev = 0
    -- gsub callback: `ch` is the escaped character, `rest` the remainder of
    -- the matched run; returns the replacement text
    local function subst(ch, rest)
        local u8
        if ch == 'u' then
            local c1, c2, c3, c4 = byte(rest, 1, 4)
            -- multiplications should not be lshift since cn may be inf
            local ucode = hextbl[c1-47] * 0x1000 + hextbl[c2-47] * 0x100 + hextbl[c3-47] * 0x10 + hextbl[c4-47]
            if ucode == inf then
                myerror("invalid unicode charcode")
            end
            rest = sub(rest, 5)
            if ucode < 0x80 then -- 1byte
                u8 = char(ucode)
            elseif ucode < 0x800 then -- 2byte
                u8 = char(0xC0 + (ucode >> 6), 0x80 + (ucode & 0x3F))
            elseif ucode < 0xD800 or 0xE000 <= ucode then -- 3byte
                u8 = char(0xE0 + (ucode >> 12), 0x80 + (ucode >> 6 & 0x3F), 0x80 + (ucode & 0x3F))
            elseif 0xD800 <= ucode and ucode < 0xDC00 then -- surrogate pair 1st
                if surrogateprev == 0 then
                    surrogateprev = ucode
                    if rest == '' then
                        return ''
                    end
                end
            else -- surrogate pair 2nd
                if surrogateprev == 0 then
                    -- low surrogate without a high one: poison the state so
                    -- the check below reports the error
                    surrogateprev = 1
                else
                    ucode = 0x10000 + (surrogateprev - 0xD800 << 10) + (ucode - 0xDC00)
                    surrogateprev = 0
                    u8 = char(0xF0 + (ucode >> 18), 0x80 + (ucode >> 12 & 0x3F), 0x80 + (ucode >> 6 & 0x3F), 0x80 + (ucode & 0x3F))
                end
            end
        end
        if surrogateprev ~= 0 then
            myerror("invalid surrogate pair")
        end
        return (u8 or escapetbl[ch]) .. rest
    end
    -- true when no half-consumed surrogate pair is pending
    local function surrogateok()
        return surrogateprev == 0
    end
    return {
        subst = subst,
        surrogateok = surrogateok
    }
end

View File

@ -0,0 +1,364 @@
local error = error
local byte, char, find, gsub, match, sub = string.byte, string.char, string.find, string.gsub, string.match, string.sub
local tonumber = tonumber
local tostring, setmetatable = tostring, setmetatable
-- The function that interprets JSON strings is separated into another file so as to
-- use bitwise operation to speedup unicode codepoints processing on Lua 5.3.
-- (loaded with premake's `include` rather than `require`)
local genstrlib
if _VERSION == "Lua 5.3" then
    genstrlib = include '_str_lib_lua53.lua'
else
    genstrlib = include '_str_lib.lua'
end
local _ENV = nil
-- Factory: build a JSON decoder closure.  All parser state (the input
-- string, the cursor `pos`, the null placeholder, the arraylen flag) lives
-- as upvalues shared by the f_* handlers below.
local function newdecoder()
    local json, pos, nullv, arraylen
    -- `f` is the temporary for dispatcher[c] and
    -- the dummy for the first return value of `find`
    local dispatcher, f
    --[[
        Helper
    --]]
    local function decodeerror(errmsg)
        error("parse error at " .. pos .. ": " .. errmsg)
    end
    --[[
        Invalid
    --]]
    local function f_err()
        decodeerror('invalid value')
    end
    --[[
        Constants
    --]]
    -- null (dispatched on 'n'; the remaining "ull" is checked here)
    local function f_nul()
        if sub(json, pos, pos+2) == 'ull' then
            pos = pos+3
            return nullv
        end
        decodeerror('invalid value')
    end
    -- false
    local function f_fls()
        if sub(json, pos, pos+3) == 'alse' then
            pos = pos+4
            return false
        end
        decodeerror('invalid value')
    end
    -- true
    local function f_tru()
        if sub(json, pos, pos+2) == 'rue' then
            pos = pos+3
            return true
        end
        decodeerror('invalid value')
    end
    --[[
        Numbers
        Conceptually, the longest prefix that matches to `-?(0|[1-9][0-9]*)(\.[0-9]*)?([eE][+-]?[0-9]*)?`
        (in regexp) is captured as a number and its conformance to the JSON spec is checked.
    --]]
    -- deal with non-standard locales (e.g. ',' as the decimal separator)
    local radixmark = match(tostring(0.5), '[^0-9]')
    local fixedtonumber = tonumber
    if radixmark ~= '.' then
        if find(radixmark, '%W') then
            radixmark = '%' .. radixmark
        end
        fixedtonumber = function(s)
            return tonumber(gsub(s, '.', radixmark))
        end
    end
    local function error_number()
        decodeerror('invalid number')
    end
    -- `0(\.[0-9]*)?([eE][+-]?[0-9]*)?` -- numbers starting with a zero
    local function f_zro(mns)
        local postmp = pos
        local num
        local c = byte(json, postmp)
        if not c then
            return error_number()
        end
        if c == 0x2E then -- is this `.`?
            num = match(json, '^.[0-9]*', pos) -- skipping 0
            local numlen = #num
            if numlen == 1 then
                return error_number()
            end
            postmp = pos + numlen
            c = byte(json, postmp)
        end
        if c == 0x45 or c == 0x65 then -- is this e or E?
            local numexp = match(json, '^[^eE]*[eE][-+]?[0-9]+', pos)
            if not numexp then
                return error_number()
            end
            if num then -- since `0e.*` is always 0.0, ignore those
                num = numexp
            end
            postmp = pos + #numexp
        end
        pos = postmp
        if num then
            num = fixedtonumber(num)
        else
            num = 0.0
        end
        if mns then
            num = -num
        end
        return num
    end
    -- `[1-9][0-9]*(\.[0-9]*)?([eE][+-]?[0-9]*)?` -- other numbers
    local function f_num(mns)
        pos = pos-1
        local num = match(json, '^.[0-9]*%.?[0-9]*', pos)
        if byte(num, -1) == 0x2E then -- reject a trailing '.'
            return error_number()
        end
        local postmp = pos + #num
        local c = byte(json, postmp)
        if c == 0x45 or c == 0x65 then -- e or E?
            num = match(json, '^[^eE]*[eE][-+]?[0-9]+', pos)
            if not num then
                return error_number()
            end
            postmp = pos + #num
        end
        pos = postmp
        num = fixedtonumber(num)-0.0
        if mns then
            num = -num
        end
        return num
    end
    -- skip minus sign, then dispatch on the digit that follows
    local function f_mns()
        local c = byte(json, pos)
        if c then
            pos = pos+1
            if c > 0x30 then
                if c < 0x3A then -- '1'..'9'
                    return f_num(true)
                end
            else
                if c > 0x2F then -- '0'
                    return f_zro(true)
                end
            end
        end
        decodeerror('invalid number')
    end
    --[[
        Strings
    --]]
    local f_str_lib = genstrlib(decodeerror)
    local f_str_surrogateok = f_str_lib.surrogateok -- whether codepoints for surrogate pair are correctly paired
    local f_str_subst = f_str_lib.subst -- the function passed to gsub that interprets escapes
    -- caching interpreted keys for speed
    local f_str_keycache = setmetatable({}, {__mode="v"})
    local function f_str(iskey)
        local newpos = pos-2
        local pos2 = pos
        local c1, c2
        -- scan for the closing quote, skipping over escaped ones
        repeat
            newpos = find(json, '"', pos2, true) -- search '"'
            if not newpos then
                decodeerror("unterminated string")
            end
            pos2 = newpos+1
            while true do -- skip preceding '\\'s
                c1, c2 = byte(json, newpos-2, newpos-1)
                if c2 ~= 0x5C or c1 ~= 0x5C then
                    break
                end
                newpos = newpos-2
            end
        until c2 ~= 0x5C -- check '"' is not preceded by '\'
        local str = sub(json, pos, pos2-2)
        pos = pos2
        if iskey then -- check key cache
            local str2 = f_str_keycache[str]
            if str2 then
                return str2
            end
        end
        local str2 = str
        if find(str2, '\\', 1, true) then -- check if backslash occurs
            str2 = gsub(str2, '\\(.)([^\\]*)', f_str_subst) -- interpret escapes
            if not f_str_surrogateok() then
                decodeerror("invalid surrogate pair")
            end
        end
        if iskey then -- commit key cache
            f_str_keycache[str] = str2
        end
        return str2
    end
    --[[
        Arrays, Objects
    --]]
    -- array
    local function f_ary()
        local ary = {}
        f, pos = find(json, '^[ \n\r\t]*', pos)
        pos = pos+1
        local i = 0
        if byte(json, pos) ~= 0x5D then -- check closing bracket ']', that consists an empty array
            local newpos = pos-1
            repeat
                i = i+1
                f = dispatcher[byte(json,newpos+1)] -- parse value
                pos = newpos+2
                ary[i] = f()
                f, newpos = find(json, '^[ \n\r\t]*,[ \n\r\t]*', pos) -- check comma
            until not newpos
            f, newpos = find(json, '^[ \n\r\t]*%]', pos) -- check closing bracket
            if not newpos then
                decodeerror("no closing bracket of an array")
            end
            pos = newpos
        end
        pos = pos+1
        if arraylen then -- commit the length of the array if `arraylen` is set
            ary[0] = i
        end
        return ary
    end
    -- objects
    local function f_obj()
        local obj = {}
        f, pos = find(json, '^[ \n\r\t]*', pos)
        pos = pos+1
        if byte(json, pos) ~= 0x7D then -- check the closing bracket '}', that consists an empty object
            local newpos = pos-1
            repeat
                pos = newpos+1
                if byte(json, pos) ~= 0x22 then -- check '"'
                    decodeerror("not key")
                end
                pos = pos+1
                local key = f_str(true) -- parse key
                -- optimized for compact json
                -- c1, c2 == ':', <the first char of the value> or
                -- c1, c2, c3 == ':', ' ', <the first char of the value>
                f = f_err
                do
                    local c1, c2, c3 = byte(json, pos, pos+3)
                    if c1 == 0x3A then
                        newpos = pos
                        if c2 == 0x20 then
                            newpos = newpos+1
                            c2 = c3
                        end
                        f = dispatcher[c2]
                    end
                end
                if f == f_err then -- read a colon and arbitrary number of spaces
                    f, newpos = find(json, '^[ \n\r\t]*:[ \n\r\t]*', pos)
                    if not newpos then
                        decodeerror("no colon after a key")
                    end
                end
                f = dispatcher[byte(json, newpos+1)] -- parse value
                pos = newpos+2
                obj[key] = f()
                f, newpos = find(json, '^[ \n\r\t]*,[ \n\r\t]*', pos)
            until not newpos
            f, newpos = find(json, '^[ \n\r\t]*}', pos)
            if not newpos then
                decodeerror("no closing bracket of an object")
            end
            pos = newpos
        end
        pos = pos+1
        return obj
    end
    --[[
        The jump table to dispatch a parser for a value, indexed by the code of the value's first char.
        Nil key means the end of json.
    --]]
    dispatcher = {
        f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err,
        f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err,
        f_err, f_err, f_str, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_mns, f_err, f_err,
        f_zro, f_num, f_num, f_num, f_num, f_num, f_num, f_num, f_num, f_num, f_err, f_err, f_err, f_err, f_err, f_err,
        f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err,
        f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_ary, f_err, f_err, f_err, f_err,
        f_err, f_err, f_err, f_err, f_err, f_err, f_fls, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_nul, f_err,
        f_err, f_err, f_err, f_err, f_tru, f_err, f_err, f_err, f_err, f_err, f_err, f_obj, f_err, f_err, f_err, f_err,
    }
    dispatcher[0] = f_err
    dispatcher.__index = function()
        decodeerror("unexpected termination")
    end
    setmetatable(dispatcher, dispatcher)
    --[[
        run decoder
    --]]
    -- decode(json [, startpos [, nullvalue [, arraylen]]]):
    -- when startpos is given, returns value plus the position after it;
    -- otherwise requires the whole string to be consumed.
    local function decode(json_, pos_, nullv_, arraylen_)
        json, pos, nullv, arraylen = json_, pos_, nullv_, arraylen_
        pos = pos or 1
        f, pos = find(json, '^[ \n\r\t]*', pos)
        pos = pos+1
        f = dispatcher[byte(json, pos)]
        pos = pos+1
        local v = f()
        if pos_ then
            return v, pos
        else
            f, pos = find(json, '^[ \n\r\t]*', pos)
            if pos ~= #json then
                error('json ended')
            end
            return v
        end
    end
    return decode
end
return newdecoder

View File

@ -0,0 +1,184 @@
-- Localize globals used by the encoder so the module works with _ENV = nil.
local error = error
local byte, find, format, gsub, match = string.byte, string.find, string.format, string.gsub, string.match
local concat = table.concat
local tostring = tostring
local pairs, type = pairs, type
local setmetatable = setmetatable
-- Sentinels for range checking numbers: +inf and -inf.
local huge, tiny = 1/0, -1/0
-- Pattern matching characters that must be escaped inside a JSON string
-- (control chars, '"' and '\').
local f_string_pat
if _VERSION == "Lua 5.1" then
-- use the cluttered pattern because lua 5.1 does not handle \0 in a pattern correctly
f_string_pat = '[^ -!#-[%]^-\255]'
else
f_string_pat = '[\0-\31"\\]'
end
-- Drop the global environment from here on: any leftover global access
-- becomes an error instead of a silent lookup.
local _ENV = nil
-- Build a JSON encoder closure. Each call to the returned `encode` resets
-- the shared buffer state (`i`, `builder`, `visited`), so one encoder
-- instance may be reused but is not reentrant.
local function newencoder()
local v, nullv
-- i: next free slot in builder; builder: output fragments joined at the
-- end; visited: tables currently on the encoding stack (cycle detection).
local i, builder, visited
-- booleans (and anything routed here) are emitted via tostring
local function f_tostring(v)
builder[i] = tostring(v)
i = i+1
end
-- Detect locale-specific number formatting: radixmark is the decimal
-- separator tostring() produces, delimmark a possible grouping separator.
local radixmark = match(tostring(0.5), '[^0-9]')
local delimmark = match(tostring(12345.12345), '[^0-9' .. radixmark .. ']')
if radixmark == '.' then
radixmark = nil
end
local radixordelim
if radixmark or delimmark then
radixordelim = true
-- escape the marks for use in gsub patterns when they are magic chars
if radixmark and find(radixmark, '%W') then
radixmark = '%' .. radixmark
end
if delimmark and find(delimmark, '%W') then
delimmark = '%' .. delimmark
end
end
-- Emit a finite number; normalizes locale output back to JSON ('.' radix,
-- no grouping). Infinities and NaN are rejected (NaN fails both bounds).
local f_number = function(n)
if tiny < n and n < huge then
local s = format("%.17g", n)
if radixordelim then
if delimmark then
s = gsub(s, delimmark, '')
end
if radixmark then
s = gsub(s, radixmark, '.')
end
end
builder[i] = s
i = i+1
return
end
error('invalid number')
end
local doencode
-- gsub replacement table for string escaping; unlisted control chars fall
-- through to __index and become \u00XX escapes.
local f_string_subst = {
['"'] = '\\"',
['\\'] = '\\\\',
['\b'] = '\\b',
['\f'] = '\\f',
['\n'] = '\\n',
['\r'] = '\\r',
['\t'] = '\\t',
__index = function(_, c)
return format('\\u00%02X', byte(c))
end
}
setmetatable(f_string_subst, f_string_subst)
-- Emit a quoted, escaped JSON string.
local function f_string(s)
builder[i] = '"'
if find(s, f_string_pat) then
s = gsub(s, f_string_pat, f_string_subst)
end
builder[i+1] = s
builder[i+2] = '"'
i = i+3
end
-- Emit a table as a JSON array or object.
-- o[0] (a number) is an explicit array length; otherwise o[1] ~= nil means
-- "array", else "object" with string keys only.
local function f_table(o)
if visited[o] then
error("loop detected")
end
visited[o] = true
local tmp = o[0]
if type(tmp) == 'number' then -- arraylen available
builder[i] = '['
i = i+1
for j = 1, tmp do
doencode(o[j])
builder[i] = ','
i = i+1
end
-- back up over the trailing comma (only written when tmp > 0)
if tmp > 0 then
i = i-1
end
builder[i] = ']'
else
tmp = o[1]
if tmp ~= nil then -- detected as array
builder[i] = '['
i = i+1
local j = 2
repeat
doencode(tmp)
tmp = o[j]
if tmp == nil then
break
end
j = j+1
builder[i] = ','
i = i+1
until false
builder[i] = ']'
else -- detected as object
builder[i] = '{'
i = i+1
local tmp = i
for k, v in pairs(o) do
if type(k) ~= 'string' then
error("non-string key")
end
f_string(k)
builder[i] = ':'
i = i+1
doencode(v)
builder[i] = ','
i = i+1
end
-- strip the trailing comma when at least one pair was emitted
if i > tmp then
i = i-1
end
builder[i] = '}'
end
end
i = i+1
-- the table is no longer on the stack; allow it to appear again elsewhere
visited[o] = nil
end
-- Per-type dispatch; unknown types (function, userdata, ...) error out.
local dispatcher = {
boolean = f_tostring,
number = f_number,
string = f_string,
table = f_table,
__index = function()
error("invalid type value")
end
}
setmetatable(dispatcher, dispatcher)
-- Encode one value: the user-supplied null sentinel becomes JSON null,
-- everything else routes through the type dispatcher.
function doencode(v)
if v == nullv then
builder[i] = 'null'
i = i+1
return
end
return dispatcher[type(v)](v)
end
-- Public entry point: encode `v_`, treating `nullv_` as JSON null.
local function encode(v_, nullv_)
v, nullv = v_, nullv_
i, builder, visited = 1, {}, {}
doencode(v)
return concat(builder)
end
return encode
end
return newencoder

View File

@ -0,0 +1,525 @@
-- Localize globals for use after _ENV is cleared below.
local error = error
local byte, char, find, gsub, match, sub = string.byte, string.char, string.find, string.gsub, string.match, string.sub
local tonumber = tonumber
-- table.unpack on 5.2+, global unpack on 5.1
local tostring, type, unpack = tostring, type, table.unpack or unpack
-- The function that interprets JSON strings is separated into another file so as to
-- use bitwise operation to speedup unicode codepoints processing on Lua 5.3.
-- NOTE(review): `include` is not a Lua builtin — presumably provided by the
-- host environment's loader; verify against the embedding application.
local genstrlib
if _VERSION == "Lua 5.3" then
genstrlib = include '_str_lib_lua53.lua'
else
genstrlib = include '_str_lib.lua'
end
local _ENV = nil
-- shared no-op used for absent SAX callbacks and exhausted input generators
local function nop() end
-- Create a SAX-style JSON parser.
-- `src` is either a complete JSON string or a generator function returning
-- successive chunks (nil at end of input). `saxtbl` supplies optional event
-- callbacks (startobject, key, endobject, startarray, endarray, string,
-- number, boolean, null); missing ones default to no-ops.
-- Internal state: `json` is the current chunk, `jsonlen` its length, `pos`
-- the 1-based cursor within the chunk, and `acc` the total length of all
-- chunks already consumed (so acc+pos is the absolute position).
local function newparser(src, saxtbl)
local json, jsonnxt
local jsonlen, pos, acc = 0, 1, 0
-- `f` is the temporary for dispatcher[c] and
-- the dummy for the first return value of `find`
local dispatcher, f
-- initialize
if type(src) == 'string' then
json = src
jsonlen = #json
-- single-string input: the "next chunk" is empty, then a no-op forever
jsonnxt = function()
json = ''
jsonlen = 0
jsonnxt = nop
end
else
-- streaming input: pull chunks until a non-empty one arrives, or mark
-- end-of-stream by emptying the state and disabling further pulls
jsonnxt = function()
acc = acc + jsonlen
pos = 1
repeat
json = src()
if not json then
json = ''
jsonlen = 0
jsonnxt = nop
return
end
jsonlen = #json
until jsonlen > 0
end
jsonnxt()
end
local sax_startobject = saxtbl.startobject or nop
local sax_key = saxtbl.key or nop
local sax_endobject = saxtbl.endobject or nop
local sax_startarray = saxtbl.startarray or nop
local sax_endarray = saxtbl.endarray or nop
local sax_string = saxtbl.string or nop
local sax_number = saxtbl.number or nop
local sax_boolean = saxtbl.boolean or nop
local sax_null = saxtbl.null or nop
--[[
Helper
--]]
-- Byte at the cursor, fetching the next chunk on chunk exhaustion; nil at
-- true end of input.
local function tryc()
local c = byte(json, pos)
if not c then
jsonnxt()
c = byte(json, pos)
end
return c
end
local function parseerror(errmsg)
error("parse error at " .. acc + pos .. ": " .. errmsg)
end
-- Like tryc, but end of input is a parse error.
local function tellc()
return tryc() or parseerror("unexpected termination")
end
local function spaces() -- skip spaces and prepare the next char
while true do
f, pos = find(json, '^[ \n\r\t]*', pos)
if pos ~= jsonlen then
pos = pos+1
return
end
if jsonlen == 0 then
parseerror("unexpected termination")
end
-- whitespace ran to the end of the chunk; continue in the next one
jsonnxt()
end
end
--[[
Invalid
--]]
local function f_err()
parseerror('invalid value')
end
--[[
Constants
--]]
-- fallback slow constants parser
-- Matches `target` byte by byte across chunk boundaries, then fires sax_f.
local function generic_constant(target, targetlen, ret, sax_f)
for i = 1, targetlen do
local c = tellc()
if byte(target, i) ~= c then
parseerror("invalid char")
end
pos = pos+1
end
return sax_f(ret)
end
-- null
-- Fast path compares the remainder of the literal in the current chunk;
-- the slow path handles literals split across chunks.
local function f_nul()
if sub(json, pos, pos+2) == 'ull' then
pos = pos+3
return sax_null(nil)
end
return generic_constant('ull', 3, nil, sax_null)
end
-- false
local function f_fls()
if sub(json, pos, pos+3) == 'alse' then
pos = pos+4
return sax_boolean(false)
end
return generic_constant('alse', 4, false, sax_boolean)
end
-- true
local function f_tru()
if sub(json, pos, pos+2) == 'rue' then
pos = pos+3
return sax_boolean(true)
end
return generic_constant('rue', 3, true, sax_boolean)
end
--[[
Numbers
Conceptually, the longest prefix that matches to `(0|[1-9][0-9]*)(\.[0-9]*)?([eE][+-]?[0-9]*)?`
(in regexp) is captured as a number and its conformance to the JSON spec is checked.
--]]
-- deal with non-standard locales
local radixmark = match(tostring(0.5), '[^0-9]')
local fixedtonumber = tonumber
if radixmark ~= '.' then -- deals with non-standard locales
if find(radixmark, '%W') then
radixmark = '%' .. radixmark
end
-- translate JSON's '.' into the locale's radix char before tonumber
fixedtonumber = function(s)
return tonumber(gsub(s, '.', radixmark))
end
end
-- fallback slow parser
-- Accumulates digit bytes one at a time (works across chunk boundaries),
-- validating the JSON number grammar as it goes.
local function generic_number(mns)
local buf = {}
local i = 1
local c = byte(json, pos)
pos = pos+1
local function nxt()
buf[i] = c
i = i+1
c = tryc()
pos = pos+1
end
if c == 0x30 then
nxt()
else
repeat nxt() until not (c and 0x30 <= c and c < 0x3A)
end
if c == 0x2E then
nxt()
if not (c and 0x30 <= c and c < 0x3A) then
parseerror('invalid number')
end
repeat nxt() until not (c and 0x30 <= c and c < 0x3A)
end
if c == 0x45 or c == 0x65 then
nxt()
if c == 0x2B or c == 0x2D then
nxt()
end
if not (c and 0x30 <= c and c < 0x3A) then
parseerror('invalid number')
end
repeat nxt() until not (c and 0x30 <= c and c < 0x3A)
end
pos = pos-1
local num = char(unpack(buf))
-- `-0.0` coerces integer results to float on Lua 5.3
num = fixedtonumber(num)-0.0
if mns then
num = -num
end
return sax_number(num)
end
-- `0(\.[0-9]*)?([eE][+-]?[0-9]*)?`
-- Fast path for numbers starting with '0' (cursor is just past the '0');
-- falls back to generic_number when the literal may extend past the chunk.
local function f_zro(mns)
local postmp = pos
local num
local c = byte(json, postmp)
if c == 0x2E then -- is this `.`?
num = match(json, '^.[0-9]*', pos) -- skipping 0
local numlen = #num
if numlen == 1 then
pos = pos-1
return generic_number(mns)
end
postmp = pos + numlen
c = byte(json, postmp)
end
if c == 0x45 or c == 0x65 then -- is this e or E?
local numexp = match(json, '^[^eE]*[eE][-+]?[0-9]+', pos)
if not numexp then
pos = pos-1
return generic_number(mns)
end
if num then -- since `0e.*` is always 0.0, ignore those
num = numexp
end
postmp = pos + #numexp
end
if postmp > jsonlen then
pos = pos-1
return generic_number(mns)
end
pos = postmp
if num then
num = fixedtonumber(num)
else
num = 0.0
end
if mns then
num = -num
end
return sax_number(num)
end
-- `[1-9][0-9]*(\.[0-9]*)?([eE][+-]?[0-9]*)?`
-- Fast path for numbers starting with 1-9; cursor backed up to the first
-- digit so the match can see the whole literal.
local function f_num(mns)
pos = pos-1
local num = match(json, '^.[0-9]*%.?[0-9]*', pos)
if byte(num, -1) == 0x2E then
return generic_number(mns)
end
local postmp = pos + #num
local c = byte(json, postmp)
if c == 0x45 or c == 0x65 then -- e or E?
num = match(json, '^[^eE]*[eE][-+]?[0-9]+', pos)
if not num then
return generic_number(mns)
end
postmp = pos + #num
end
if postmp > jsonlen then
return generic_number(mns)
end
pos = postmp
num = fixedtonumber(num)-0.0
if mns then
num = -num
end
return sax_number(num)
end
-- skip minus sign
-- Dispatches to f_num / f_zro with the minus flag set.
local function f_mns()
local c = byte(json, pos) or tellc()
if c then
pos = pos+1
if c > 0x30 then
if c < 0x3A then
return f_num(true)
end
else
if c > 0x2F then
return f_zro(true)
end
end
end
parseerror("invalid number")
end
--[[
Strings
--]]
local f_str_lib = genstrlib(parseerror)
local f_str_surrogateok = f_str_lib.surrogateok -- whether codepoints for surrogate pair are correctly paired
local f_str_subst = f_str_lib.subst -- the function passed to gsub that interprets escapes
-- Parse a string literal (opening quote already consumed). `iskey` selects
-- the key callback instead of the string callback.
local function f_str(iskey)
local pos2 = pos
local newpos
local str = ''
local bs
while true do
while true do -- search '\' or '"'
newpos = find(json, '[\\"]', pos2)
if newpos then
break
end
-- neither found in this chunk: stash it and continue in the next.
-- pos2 may be 2 when the previous chunk ended on a backslash whose
-- escaped char is the first byte of this chunk.
str = str .. sub(json, pos, jsonlen)
if pos2 == jsonlen+2 then
pos2 = 2
else
pos2 = 1
end
jsonnxt()
end
if byte(json, newpos) == 0x22 then -- break if '"'
break
end
pos2 = newpos+2 -- skip '\<char>'
bs = true -- remember that backslash occurs
end
str = str .. sub(json, pos, newpos-1)
pos = newpos+1
if bs then -- check if backslash occurs
str = gsub(str, '\\(.)([^\\]*)', f_str_subst) -- interpret escapes
if not f_str_surrogateok() then
parseerror("invalid surrogate pair")
end
end
if iskey then
return sax_key(str)
end
return sax_string(str)
end
--[[
Arrays, Objects
--]]
-- arrays
-- Emits startarray, then alternates value / comma until ']' is found.
-- The slow branches re-check ',' and ']' after skipping whitespace that may
-- span chunk boundaries.
local function f_ary()
sax_startarray()
spaces()
if byte(json, pos) ~= 0x5D then -- check the closing bracket ']', that consists an empty array
local newpos
while true do
f = dispatcher[byte(json, pos)] -- parse value
pos = pos+1
f()
f, newpos = find(json, '^[ \n\r\t]*,[ \n\r\t]*', pos) -- check comma
if not newpos then
f, newpos = find(json, '^[ \n\r\t]*%]', pos) -- check closing bracket
if newpos then
pos = newpos
break
end
spaces() -- since the current chunk can be ended, skip spaces toward following chunks
local c = byte(json, pos)
if c == 0x2C then -- check comma again
pos = pos+1
spaces()
newpos = pos-1
elseif c == 0x5D then -- check closing bracket again
break
else
parseerror("no closing bracket of an array")
end
end
pos = newpos+1
if pos > jsonlen then
spaces()
end
end
end
pos = pos+1
return sax_endarray()
end
-- objects
-- Emits startobject, then repeats key / colon / value / comma until '}'.
local function f_obj()
sax_startobject()
spaces()
if byte(json, pos) ~= 0x7D then -- check the closing bracket `}`, that consists an empty object
local newpos
while true do
if byte(json, pos) ~= 0x22 then
parseerror("not key")
end
pos = pos+1
f_str(true)
f, newpos = find(json, '^[ \n\r\t]*:[ \n\r\t]*', pos) -- check colon
if not newpos then
spaces() -- since the current chunk can be ended, skip spaces toward following chunks
if byte(json, pos) ~= 0x3A then -- check colon again
parseerror("no colon after a key")
end
pos = pos+1
spaces()
newpos = pos-1
end
pos = newpos+1
if pos > jsonlen then
spaces()
end
f = dispatcher[byte(json, pos)] -- parse value
pos = pos+1
f()
f, newpos = find(json, '^[ \n\r\t]*,[ \n\r\t]*', pos) -- check comma
if not newpos then
f, newpos = find(json, '^[ \n\r\t]*}', pos) -- check closing bracket
if newpos then
pos = newpos
break
end
spaces() -- since the current chunk can be ended, skip spaces toward following chunks
local c = byte(json, pos)
if c == 0x2C then -- check comma again
pos = pos+1
spaces()
newpos = pos-1
elseif c == 0x7D then -- check closing bracket again
break
else
parseerror("no closing bracket of an object")
end
end
pos = newpos+1
if pos > jsonlen then
spaces()
end
end
end
pos = pos+1
return sax_endobject()
end
--[[
The jump table to dispatch a parser for a value, indexed by the code of the value's first char.
Key should be non-nil.
--]]
dispatcher = {
f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err,
f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err,
f_err, f_err, f_str, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_mns, f_err, f_err,
f_zro, f_num, f_num, f_num, f_num, f_num, f_num, f_num, f_num, f_num, f_err, f_err, f_err, f_err, f_err, f_err,
f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err,
f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_ary, f_err, f_err, f_err, f_err,
f_err, f_err, f_err, f_err, f_err, f_err, f_fls, f_err, f_err, f_err, f_err, f_err, f_err, f_err, f_nul, f_err,
f_err, f_err, f_err, f_err, f_tru, f_err, f_err, f_err, f_err, f_err, f_err, f_obj, f_err, f_err, f_err, f_err,
}
dispatcher[0] = f_err
--[[
public funcitons
--]]
-- Parse exactly one JSON value from the input, firing SAX callbacks.
local function run()
spaces()
f = dispatcher[byte(json, pos)]
pos = pos+1
f()
end
-- Read up to `n` raw bytes from the current position (spanning chunks).
local function read(n)
if n < 0 then
error("the argument must be non-negative")
end
local pos2 = (pos-1) + n
local str = sub(json, pos, pos2)
while pos2 > jsonlen and jsonlen ~= 0 do
jsonnxt()
pos2 = pos2 - (jsonlen - (pos-1))
str = str .. sub(json, pos, pos2)
end
if jsonlen ~= 0 then
pos = pos2+1
end
return str
end
-- Absolute 1-based position within the whole (possibly chunked) input.
local function tellpos()
return acc + pos
end
return {
run = run,
tryc = tryc,
read = read,
tellpos = tellpos,
}
end
-- Create a SAX parser that streams the file at `fn` in 8 KiB chunks.
-- The chunk generator closes the file handle once EOF is reached.
local function newfileparser(fn, saxtbl)
    local fp = io.open(fn)
    -- Generator handed to newparser: returns the next chunk, or nil when
    -- the file is exhausted (or failed to open).
    local function next_chunk()
        if not fp then
            return nil
        end
        local chunk = fp:read(8192)
        if chunk == nil then
            fp:close()
            fp = nil
        end
        return chunk
    end
    return newparser(next_chunk, saxtbl)
end
return {
    newparser = newparser,
    newfileparser = newfileparser
}

View File

@ -0,0 +1,206 @@
-- Module table: metadata plus (after the definitions below) a __call
-- metamethod that constructs version objects.
local semver = {
_VERSION = '1.2.0',
_DESCRIPTION = 'semver for Lua',
_URL = 'https://github.com/kikito/semver.lua',
_LICENSE = [[
MIT LICENSE
Copyright (c) 2015 Enrique García Cota
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
]]
}
-- Assert that `number` is a non-negative integer; `name` labels the error.
-- Previously a non-number argument produced an opaque "attempt to compare"
-- error; the explicit type assertion gives a readable message instead.
local function checkPositiveInteger(number, name)
    assert(type(number) == 'number', name .. ' must be a number')
    assert(number >= 0, name .. ' must be a valid positive number')
    assert(math.floor(number) == number, name .. ' must be an integer')
end
-- A value counts as "present" when it is truthy and not the empty string.
-- (Preserves the original's falsy passthrough: nil stays nil, false stays false.)
local function present(value)
    if value then
        return value ~= ''
    end
    return value
end
-- splitByDot("a.bbc.d") == {"a", "bbc", "d"}
local function splitByDot(str)
    local parts, count = {}, 0
    for piece in (str or ""):gmatch("[^%.]+") do
        count = count + 1
        parts[count] = piece
    end
    return parts
end
-- Split "-prerelease+build" into its two signed halves.
-- Returns (prereleaseWithSign, buildWithSign); either may be nil when the
-- corresponding part is absent. Errors when neither sign is found.
-- (In Lua patterns a '+' with no preceding item matches a literal '+'.)
local function parsePrereleaseAndBuildWithSign(str)
    local prereleaseWithSign, buildWithSign = str:match("^(-[^+]+)(+.+)$")
    if not prereleaseWithSign or not buildWithSign then
        prereleaseWithSign = str:match("^(-.+)$")
        buildWithSign = str:match("^(+.+)$")
    end
    assert(prereleaseWithSign or buildWithSign, ("The parameter %q must begin with + or - to denote a prerelease or a build"):format(str))
    return prereleaseWithSign, buildWithSign
end
-- Strip the leading '-' from a prerelease and validate its characters.
-- Returns nil when no prerelease was given.
-- The old error message claimed a "slash" although the sign is a dash and
-- the allowed characters are alphanumerics, dots and dashes.
local function parsePrerelease(prereleaseWithSign)
    if prereleaseWithSign then
        local prerelease = prereleaseWithSign:match("^-(%w[%.%w-]*)$")
        assert(prerelease, ("The prerelease %q is not a dash (-) followed by alphanumerics, dots and dashes"):format(prereleaseWithSign))
        return prerelease
    end
end
-- Strip the leading '+' from build metadata and validate its characters.
-- Returns nil when no build part was given.
-- Error message corrected: the pattern permits dashes, not slashes.
local function parseBuild(buildWithSign)
    if buildWithSign then
        local build = buildWithSign:match("^%+(%w[%.%w-]*)$")
        assert(build, ("The build %q is not a + sign followed by alphanumerics, dots and dashes"):format(buildWithSign))
        return build
    end
end
-- Parse the "-prerelease+build" suffix of a version string.
-- Returns (prerelease, build), each nil when absent; errors on bad syntax.
local function parsePrereleaseAndBuild(str)
    if not present(str) then
        return nil, nil
    end
    local signedPrerelease, signedBuild = parsePrereleaseAndBuildWithSign(str)
    local prerelease = parsePrerelease(signedPrerelease)
    local build = parseBuild(signedBuild)
    return prerelease, build
end
-- Split a version string into (major, minor, patch, prerelease, build).
-- Missing minor/patch come back as nil (tonumber('') is nil); the caller
-- applies defaults.
local function parseVersion(str)
    local sMajor, sMinor, sPatch, sPrereleaseAndBuild = str:match("^(%d+)%.?(%d*)%.?(%d*)(.-)$")
    assert(type(sMajor) == 'string', ("Could not extract version number(s) from %q"):format(str))
    local prerelease, build = parsePrereleaseAndBuild(sPrereleaseAndBuild)
    return tonumber(sMajor), tonumber(sMinor), tonumber(sPatch), prerelease, build
end
-- return 0 if a == b, -1 if a < b, and 1 if a > b
local function compare(a, b)
    if a == b then
        return 0
    elseif a < b then
        return -1
    end
    return 1
end
-- Compare two prerelease identifiers per semver: equal ids tie, a missing
-- id sorts first, numeric ids compare numerically and always precede
-- alphanumeric ids, which compare lexically.
local function compareIds(myId, otherId)
    if myId == otherId then
        return 0
    end
    if not myId then
        return -1
    end
    if not otherId then
        return 1
    end
    local mineAsNumber = tonumber(myId)
    local otherAsNumber = tonumber(otherId)
    if mineAsNumber and otherAsNumber then
        -- both numeric: numerical comparison
        return compare(mineAsNumber, otherAsNumber)
    end
    -- numericals are always smaller than alphanums
    if mineAsNumber then
        return -1
    end
    if otherAsNumber then
        return 1
    end
    -- alphanumerical comparison
    return compare(myId, otherId)
end
-- True when the identifier list `myIds` has lower precedence than
-- `otherIds`: the first differing identifier decides; with all shared
-- identifiers equal, the shorter list wins.
local function smallerIdList(myIds, otherIds)
    local mine = #myIds
    for index = 1, mine do
        local verdict = compareIds(myIds[index], otherIds[index])
        if verdict ~= 0 then
            return verdict == -1
        end
        -- verdict == 0: identifiers tie, keep scanning
    end
    return mine < #otherIds
end
-- True when prerelease `mine` has lower precedence than `other`.
-- A version without a prerelease outranks any prerelease.
local function smallerPrerelease(mine, other)
    if mine == other or not mine then
        return false
    end
    if not other then
        return true
    end
    return smallerIdList(splitByDot(mine), splitByDot(other))
end
local methods = {}
-- next* return a fresh version with the given component bumped and the
-- lower-priority components reset to 0 (prerelease/build are dropped).
function methods:nextMajor()
return semver(self.major + 1, 0, 0)
end
function methods:nextMinor()
return semver(self.major, self.minor + 1, 0)
end
function methods:nextPatch()
return semver(self.major, self.minor, self.patch + 1)
end
local mt = { __index = methods }
-- Equality per semver precedence: major/minor/patch/prerelease must match.
function mt:__eq(other)
return self.major == other.major and
self.minor == other.minor and
self.patch == other.patch and
self.prerelease == other.prerelease
-- notice that build is ignored for precedence in semver 2.0.0
end
-- Ordering per semver precedence: numeric components first, then the
-- prerelease comparison rules (a release outranks its prereleases).
function mt:__lt(other)
if self.major ~= other.major then return self.major < other.major end
if self.minor ~= other.minor then return self.minor < other.minor end
if self.patch ~= other.patch then return self.patch < other.patch end
return smallerPrerelease(self.prerelease, other.prerelease)
-- notice that build is ignored for precedence in semver 2.0.0
end
-- This works like the "pessimisstic operator" in Rubygems.
-- if a and b are versions, a ^ b means "b is backwards-compatible with a"
-- in other words, "it's safe to upgrade from a to b"
function mt:__pow(other)
return self.major == other.major and
self.minor <= other.minor
end
-- Canonical string form: "M.m.p[-prerelease][+build]".
function mt:__tostring()
local buffer = { ("%d.%d.%d"):format(self.major, self.minor, self.patch) }
if self.prerelease then table.insert(buffer, "-" .. self.prerelease) end
if self.build then table.insert(buffer, "+" .. self.build) end
return table.concat(buffer)
end
-- Construct a version object. `major` may be a full version string, in
-- which case the remaining parameters are ignored and parsed from it;
-- otherwise numeric components are validated (minor/patch default to 0).
local function new(major, minor, patch, prerelease, build)
    assert(major, "At least one parameter is needed")
    if type(major) == 'string' then
        major, minor, patch, prerelease, build = parseVersion(major)
    end
    minor = minor or 0
    patch = patch or 0
    checkPositiveInteger(major, "major")
    checkPositiveInteger(minor, "minor")
    checkPositiveInteger(patch, "patch")
    return setmetatable({
        major = major,
        minor = minor,
        patch = patch,
        prerelease = prerelease,
        build = build
    }, mt)
end
-- Calling the module table constructs versions: semver(1,2,3) / semver("1.2.3").
setmetatable(semver, { __call = function(_, ...) return new(...) end })
-- Dogfood: replace the raw version string with a parsed version object.
semver._VERSION= semver(semver._VERSION)
return semver

2618
src/GL/eglew.h Normal file

File diff suppressed because it is too large Load Diff

23686
src/GL/glew.h Normal file

File diff suppressed because it is too large Load Diff

1775
src/GL/glxew.h Normal file

File diff suppressed because it is too large Load Diff

1447
src/GL/wglew.h Normal file

File diff suppressed because it is too large Load Diff

Binary file not shown.

Binary file not shown.

47
src/oxide.c Normal file
View File

@ -0,0 +1,47 @@
#include "oxide_internal.h"
#include <stdint.h>
#define GLEW_STATIC
#include "GL/glew.h"
bool oxide_init_gl() {
oxide_init_os_gl();
glewInit();
glClearColor(0, 0, 0, 0);
return true;
}
/*
 * Bring up the overlay in order: window, GL context, target-window lookup,
 * then an initial resize to match the target.
 * Returns a status code identifying the first stage that failed, or
 * oxide_init_success when everything succeeded.
 */
Oxide_InitResult oxide_init(const char *target_name) {
    if (!oxide_init_window()) {
        return oxide_init_window_failed;
    }
    if (!oxide_init_gl()) {
        return oxide_init_gl_failed;
    }
    if (!oxide_set_target(target_name)) {
        return oxide_init_target_failed;
    }
    oxide_resize();
    return oxide_init_success;
}
/* Tear down the overlay. Currently a no-op: the overlay window, GL context
 * and platform handles are not released here — NOTE(review): confirm
 * whether cleanup is intentionally left to process exit. */
void oxide_shutdown() {
}
/* Begin a frame: run the platform begin hook (makes the overlay GL context
 * current), then clear color, depth and stencil buffers. */
void oxide_begin_frame() {
oxide_begin_os_frame();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
}
/* End a frame: delegate to the platform hook (visibility tracking, resize
 * and buffer swap are handled there). */
void oxide_end_frame() {
oxide_end_os_frame();
}

9
src/oxide_internal.h Normal file
View File

@ -0,0 +1,9 @@
#pragma once
/* Internal interface between the common layer (oxide.c) and the platform
 * backends (oxide_linux.c / oxide_windows.c).
 * NOTE(review): `bool` is assumed to come in via oxide.h — confirm it
 * includes <stdbool.h> (or equivalent). */
#include "oxide.h"
/* Make the overlay GL context current for the frame. */
extern void oxide_begin_os_frame();
/* Platform end-of-frame work (visibility tracking, swap). */
extern void oxide_end_os_frame();
/* Create the final GL context for the overlay window. */
extern bool oxide_init_os_gl();
/* Create the transparent overlay window. */
extern bool oxide_init_window();

307
src/oxide_linux.c Normal file
View File

@ -0,0 +1,307 @@
#ifndef _WIN32
#include <string.h>
#include "oxide_internal.h"
#include "GL/glew.h"
#include "GL/glxew.h"
#include <X11/extensions/Xfixes.h>
#include <X11/extensions/shape.h>
/* NOTE(review): these typedefs restate names the X/GLX headers above
 * already provide; harmless if identical (C11 allows matching typedef
 * redefinitions) but probably redundant. */
typedef struct _XDisplay Display;
typedef unsigned long XID;
typedef struct __GLXcontextRec * GLXContext;
typedef struct __GLXFBConfigRec *GLXFBConfig;
/* Connection and GLX state shared by all functions in this file. */
static Display * display;
static int screen;
static XID color_map;
static GLXContext gl_context;
static GLXFBConfig gl_fbconfig;
/* Whether the overlay window is currently mapped (visible). */
static bool mapped = false;
/* NOTE(review): X window ids are unsigned long (XID); these are declared
 * `unsigned`, which narrows on LP64 — confirm intended. */
static unsigned oxide_window_handle;
static unsigned oxide_target_handle;
#define GLX_CONTEXT_MAJOR_VERSION_ARB 0x2091
#define GLX_CONTEXT_MINOR_VERSION_ARB 0x2092
/* Signature of glXCreateContextAttribsARB, resolved at runtime. */
typedef GLXContext (*glXCreateContextAttribsARBfn)(Display *, GLXFBConfig,
GLXContext, bool,
const int *);
/*
 * Create the Linux overlay window:
 *  1. open the display and bootstrap a legacy GLX context (needed so GLEW
 *     can resolve entry points),
 *  2. choose a 32-bit-depth FBConfig (for an alpha channel) with the most
 *     samples,
 *  3. create an override-redirect, input-transparent window shaped with
 *     XFixes so clicks pass through to the target underneath.
 * Returns false on any failure.
 */
bool oxide_init_window() {
display = XOpenDisplay(NULL);
screen = DefaultScreen(display);
Window root = DefaultRootWindow(display);
int dummy;
if (!glXQueryExtension(display, &dummy, &dummy)) {
return false;
}
/* Minimal visual for the throwaway bootstrap context.
 * NOTE(review): GLX_DEPTH_SIZE 12 looks unusual (24 is typical) — it only
 * affects the temporary context, but confirm it is intentional. */
static int dblBuf[] = {GLX_RGBA,
GLX_RED_SIZE, 1,
GLX_GREEN_SIZE, 1,
GLX_BLUE_SIZE, 1,
GLX_DEPTH_SIZE, 12,
GLX_DOUBLEBUFFER,
None};
XVisualInfo *vi = glXChooseVisual(display, screen, dblBuf);
if (vi == NULL) {
return false;
}
/* NOTE(review): `vi` is never XFree'd on this path — small one-time leak. */
gl_context = glXCreateContext(display, vi, None, 1);
/* Bind the context with no drawable just so glewInit can query GL. */
glXMakeCurrent(display, 0, gl_context);
glewExperimental = GL_TRUE;
if (glewInit() != GLEW_OK) {
return false;
}
if (glxewInit() != GLEW_OK) {
return false;
}
/* Real framebuffer config: 8 bits per channel + alpha, depth, stencil. */
GLint attribs[] = {GLX_X_RENDERABLE,
GL_TRUE,
GLX_DRAWABLE_TYPE,
GLX_WINDOW_BIT,
GLX_RENDER_TYPE,
GLX_RGBA_BIT,
GLX_X_VISUAL_TYPE,
GLX_TRUE_COLOR,
GLX_DEPTH_SIZE,
24,
GLX_STENCIL_SIZE,
8,
GLX_RED_SIZE,
8,
GLX_GREEN_SIZE,
8,
GLX_BLUE_SIZE,
8,
GLX_ALPHA_SIZE,
8,
GLX_DOUBLEBUFFER,
GL_TRUE,
None};
int fbc_count;
GLXFBConfig *fbc = glXChooseFBConfig(display, screen, attribs, &fbc_count);
if (fbc == NULL) return false;
/* Pick the 32-bit-depth config (RGBA visual) with the most MSAA samples. */
int fbc_best = -1;
int fbc_best_samples = -1;
for (int i = 0; i < fbc_count; ++i) {
XVisualInfo *info = glXGetVisualFromFBConfig(display, fbc[i]);
if (info->depth != 32)
continue;
int samples;
glXGetFBConfigAttrib(display, fbc[i], GLX_SAMPLES, &samples);
if (fbc_best < 0 || samples > fbc_best_samples) {
fbc_best = i;
fbc_best_samples = samples;
}
XFree(info);
}
if (fbc_best == -1) return false;
gl_fbconfig = fbc[fbc_best];
XFree(fbc);
XVisualInfo *info = glXGetVisualFromFBConfig(display, gl_fbconfig);
if (info == NULL) {
return false;
}
color_map = XCreateColormap(display, root, info->visual, AllocNone);
/* Override-redirect window: the WM leaves it alone; no input events are
 * propagated so the overlay never steals focus. */
XSetWindowAttributes attr;
attr.background_pixel = 0x0;
attr.border_pixel = 0;
attr.save_under = 1;
attr.override_redirect = 1;
attr.colormap = color_map;
attr.event_mask = 0x0;
attr.do_not_propagate_mask =
(KeyPressMask | KeyReleaseMask | ButtonPressMask | ButtonReleaseMask |
PointerMotionMask | ButtonMotionMask);
unsigned long mask = CWBackPixel | CWBorderPixel | CWSaveUnder |
CWOverrideRedirect | CWColormap | CWEventMask |
CWDontPropagate;
oxide_window_handle = XCreateWindow(display, root, 0, 0,
800, 600, 0,
info->depth, InputOutput, info->visual, mask, &attr);
if (oxide_window_handle == 0) return false;
/* Empty the input shape so all pointer events fall through the overlay. */
XShapeCombineMask(display, oxide_window_handle, ShapeInput, 0, 0, None, ShapeSet);
XShapeSelectInput(display, oxide_window_handle, ShapeNotifyMask);
XserverRegion region = XFixesCreateRegion(display, NULL, 0);
XFixesSetWindowShapeRegion(display, oxide_window_handle, ShapeInput, 0, 0, region);
XFixesDestroyRegion(display, region);
XFree(info);
XStoreName(display, oxide_window_handle, "oxide");
return true;
}
/*
 * Replace the bootstrap legacy GLX context with a GL 4.2 context created
 * via GLX_ARB_create_context, bind it to the overlay window, and disable
 * vsync. Returns false when the extension is unavailable or context
 * creation fails.
 */
bool oxide_init_os_gl() {
    glXCreateContextAttribsARBfn glXCreateContextAttribsARB =
        (glXCreateContextAttribsARBfn)glXGetProcAddressARB(
            (const GLubyte *)"glXCreateContextAttribsARB");
    /* Bug fix: the resolved pointer was called without a NULL check; on a
     * server without GLX_ARB_create_context this crashed. */
    if (glXCreateContextAttribsARB == NULL) {
        return false;
    }
    int ctx_attribs[] = {GLX_CONTEXT_MAJOR_VERSION_ARB, 4,
                         GLX_CONTEXT_MINOR_VERSION_ARB, 2, None};
    glXDestroyContext(display, gl_context);
    gl_context = glXCreateContextAttribsARB(display, gl_fbconfig, NULL, GL_TRUE, ctx_attribs);
    if (gl_context == NULL) return false;
    glXMakeCurrent(display, oxide_window_handle, gl_context);
    /* Disable the swap interval for all platforms.
     * Glew handles ignoring calls for the wrong platform,
     * so we can just try all of them. */
    glXSwapIntervalEXT(display, oxide_window_handle, 0);
    glXSwapIntervalMESA(0);
    glXSwapIntervalSGI(0);
    XSync(display, false);
    /* Window mapping is deferred to oxide_end_os_frame (focus-driven). */
    return true;
}
/*
 * Return true when `target` equals `current` or lies anywhere in the X
 * window subtree rooted at `current` (recursive depth-first search).
 * Used to decide whether the focused window belongs to the target app.
 */
bool is_window_child(Display *display, Window target, Window current) {
    Window root, parent, *children;
    unsigned children_count;
    bool found = false;
    if (current == target) {
        return true;
    }
    if (XQueryTree(display, current, &root, &parent, &children, &children_count)) {
        /* unsigned index: children_count is unsigned (was a signed int,
         * triggering signed/unsigned comparison). */
        for (unsigned i = 0; i < children_count; i++) {
            if (is_window_child(display, target, children[i])) {
                found = true;
                break;
            }
        }
        /* XQueryTree can succeed with zero children and a NULL list;
         * XFree(NULL)'s behavior is not guaranteed, so guard it. */
        if (children) {
            XFree(children);
        }
    }
    return found;
}
/*
 * Depth-first search of the window tree under `current` for a window whose
 * WM_NAME matches `needle` exactly. Returns the window id, or 0 when not
 * found. `display_root` is only used by the disabled #if 0 branch.
 */
Window window_from_name_search(Display *display, Window display_root, Window current, char const *needle) {
    Window retval, root, parent, *children;
    unsigned children_count;
    char *name = NULL;
    /* Check if this window has the name we seek.
     * Guard `name`: XFetchName may report success semantics differently
     * across servers; never strcmp/XFree a NULL name. */
    if (XFetchName(display, current, &name) > 0 && name != NULL) {
        int r = strcmp(needle, name);
        XFree(name);
        if (r == 0) {
            return current;
        }
    }
    retval = 0;
    /* If it does not: check all subwindows recursively. */
    if (0 != XQueryTree(display, current, &root, &parent, &children, &children_count)) {
        for (unsigned i = 0; i < children_count; ++i) {
            Window win = window_from_name_search(display, display_root, children[i], needle);
            if (win != 0) {
#if 0
                if (current != display_root)
                    retval = current;
                else
#endif
                retval = win;
                break;
            }
        }
        /* Bug fix: children may be NULL when the window has no children;
         * XFree(NULL) is not guaranteed safe. */
        if (children) {
            XFree(children);
        }
    }
    return retval;
}
bool oxide_set_target(const char *target_name) {
auto root = RootWindow(display, screen);
oxide_target_handle = window_from_name_search(display, root, root, target_name);
return oxide_target_handle != 0;
}
/* Ask the X server which window currently holds keyboard input focus. */
Window get_focus_window(Display *d) {
    Window focused;
    int revert_to; /* required out-parameter; value unused */
    XGetInputFocus(d, &focused, &revert_to);
    return focused;
}
/*
 * Match the overlay window's geometry to the target window and update the
 * GL viewport. Returns the new size, or {0, 0} when no target is set.
 *
 * Bug fixes: the success path fell off the end of this non-void function
 * (undefined behavior for any caller using the result), and `auto` type
 * inference is not valid C before C23.
 */
Oxide_WindowSize oxide_resize() {
    Oxide_WindowSize new_size = {.width = 0, .height = 0};
    if (oxide_target_handle == 0) return new_size;
    Window root = DefaultRootWindow(display);
    XWindowAttributes window_attribs;
    XGetWindowAttributes(display, oxide_target_handle, &window_attribs);
    int width = window_attribs.width;
    int height = window_attribs.height;
    Window child;
    int x = window_attribs.x;
    int y = window_attribs.y;
    /* Convert the target's own (x, y) into root-window coordinates, then
     * strip the parent-relative offset to get the absolute origin. */
    XTranslateCoordinates(display, oxide_target_handle, root, window_attribs.x, window_attribs.y, &x, &y, &child);
    x = x - window_attribs.x;
    y = y - window_attribs.y;
    glViewport(0, 0, width, height);
    XMoveResizeWindow(display, oxide_window_handle, x, y, width, height);
    new_size = (Oxide_WindowSize){.width = width, .height = height};
    return new_size;
}
/* Make the overlay GLX context current on the overlay window for drawing. */
void oxide_begin_os_frame() {
glXMakeCurrent(display, oxide_window_handle, gl_context);
}
/*
 * End-of-frame platform work: show the overlay only while input focus is
 * inside the target's window subtree; while visible, track the target's
 * geometry and present the frame.
 */
void oxide_end_os_frame() {
    Window focused = get_focus_window(display);
    /* Perf fix: is_window_child recursively walks the window tree with X
     * round-trips — query it once per frame instead of twice. */
    bool target_focused = is_window_child(display, focused, oxide_target_handle);
    if (!mapped && target_focused) {
        XMapWindow(display, oxide_window_handle);
        mapped = true;
    } else if (mapped && !target_focused) {
        XUnmapWindow(display, oxide_window_handle);
        mapped = false;
    }
    if (mapped) {
        oxide_resize();
        glXSwapBuffers(display, oxide_window_handle);
    }
}
#endif

263
src/oxide_windows.c Normal file
View File

@ -0,0 +1,263 @@
#ifdef _WIN32
#include "oxide_internal.h"
/* GLCOMPAT selects the compatibility profile in oxide_init_os_gl below. */
#define GLCOMPAT
#define GLEW_STATIC
#include "GL/glew.h"
#include "GL/wglew.h"
#include <dwmapi.h>
#include <windows.h>
#include <windowsx.h>
#include <gl/gl.h>
typedef void *Handle;
/* Overlay window / target window handles (defined elsewhere in the lib). */
extern Handle oxide_window_handle;
extern Handle oxide_target_handle;
/* Device context and WGL rendering context for the overlay window. */
Handle oxide_dc;
Handle oxide_rc;
/* Used both as the window class name and the window title. */
static const char oxide_window_name[] = "oxide";
/*
 * Create the Windows GL context for the overlay:
 *  1. choose a composited, double-buffered RGBA pixel format and create a
 *     legacy WGL context (needed to resolve WGL extension entry points),
 *  2. if wglCreateContextAttribsARB is available, replace it with a GL 4.2
 *     context (compatibility or core depending on GLCOMPAT), enable debug
 *     output in _DEBUG builds, and disable vsync.
 * Returns false when attribute-based context creation fails; falls back to
 * the legacy context when the extension is entirely absent.
 */
bool oxide_init_os_gl() {
GLuint PixelFormat;
PIXELFORMATDESCRIPTOR pfd;
memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);
pfd.nVersion = 1;
/* PFD_SUPPORT_COMPOSITION is required for a DWM-composited (transparent)
 * GL window. */
pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER | PFD_SUPPORT_COMPOSITION;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = 32;
pfd.cDepthBits = 24;
pfd.cAlphaBits = 8;
pfd.cStencilBits = 8;
pfd.iLayerType = PFD_MAIN_PLANE;
oxide_dc = GetDC((HWND)oxide_window_handle);
PixelFormat = ChoosePixelFormat(oxide_dc, &pfd);
SetPixelFormat(oxide_dc, PixelFormat, &pfd);
oxide_rc = wglCreateContext(oxide_dc);
wglMakeCurrent(oxide_dc, oxide_rc);
// calling glewinit NOW because the inside glew, there is mistake to fix...
// This is the joy of using Core. The query glGetString(GL_EXTENSIONS) is deprecated from the Core profile.
// You need to use glGetStringi(GL_EXTENSIONS, <index>) instead. Sounds like a "bug" in GLEW.
if (!wglCreateContextAttribsARB) wglCreateContextAttribsARB = (PFNWGLCREATECONTEXTATTRIBSARBPROC)wglGetProcAddress("wglCreateContextAttribsARB");
if (!wglSwapIntervalEXT) wglSwapIntervalEXT = (PFNWGLSWAPINTERVALEXTPROC)wglGetProcAddress("wglSwapIntervalEXT");
if (wglCreateContextAttribsARB) {
HGLRC atrib_rc = NULL;
int attribList[] =
{
WGL_CONTEXT_MAJOR_VERSION_ARB, 4,
WGL_CONTEXT_MINOR_VERSION_ARB, 2,
#ifdef GLCOMPAT
WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB,
#else
WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
#endif
WGL_CONTEXT_FLAGS_ARB,
//WGL_CONTEXT_ROBUST_ACCESS_BIT_ARB|
#ifndef GLCOMPAT
WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB |
#endif
#ifdef _DEBUG
WGL_CONTEXT_DEBUG_BIT_ARB
#else
0
#endif
,
0, 0};
if (!(atrib_rc = wglCreateContextAttribsARB((HDC)oxide_dc, 0, attribList))) {
return false;
}
/* NOTE(review): on this failure path atrib_rc is leaked and the legacy
 * context stays current — confirm acceptable for a fatal-init path. */
if (!wglMakeCurrent(oxide_dc, atrib_rc)) {
return false;
} else {
/* The legacy bootstrap context is no longer needed. */
wglDeleteContext(oxide_rc);
oxide_rc = atrib_rc;
#ifdef _DEBUG
if (!glDebugMessageCallbackARB) {
glDebugMessageCallbackARB = (PFNGLDEBUGMESSAGECALLBACKARBPROC)wglGetProcAddress("glDebugMessageCallbackARB");
glDebugMessageControlARB = (PFNGLDEBUGMESSAGECONTROLARBPROC)wglGetProcAddress("glDebugMessageControlARB");
}
if (glDebugMessageCallbackARB) {
//glDebugMessageCallbackARB(glErrorCallback, NULL);
glDebugMessageControlARB(GL_DONT_CARE, GL_DONT_CARE, GL_DEBUG_SEVERITY_HIGH_ARB, 0, NULL, GL_TRUE);
}
#endif
/* Disable vsync; frame pacing follows the target application. */
wglSwapIntervalEXT(0);
}
}
return true;
}
/*
 * Window procedure for the overlay window. Handles teardown on
 * close/destroy and swallows WM_PAINT (all drawing is done via GL);
 * everything else goes to DefWindowProc.
 */
void *__stdcall oxide_window_proc(HWND window_handle, unsigned message, size_t w_param, size_t l_param) {
    switch (message) {
    case WM_CREATE: {
        break;
    }
    case WM_CLOSE: {
        DestroyWindow(oxide_window_handle);
        break;
    }
    /* Release GL and window resources. */
    case WM_DESTROY: {
        wglMakeCurrent(NULL, NULL);
        wglDeleteContext(oxide_rc);
        /* Bug fix: oxide_dc was obtained with GetDC(), so it must be
         * returned with ReleaseDC() only. The previous code also called
         * DeleteDC() on it, which is invalid for a GetDC-acquired DC
         * (DeleteDC is for CreateDC/CreateCompatibleDC handles) and then
         * passed the already-deleted handle to ReleaseDC. */
        ReleaseDC(oxide_window_handle, oxide_dc);
        PostQuitMessage(0);
        UnregisterClass(oxide_window_name, GetModuleHandle(NULL));
        break;
    }
    case WM_PAINT: {
        /* Validate the dirty region without painting; GL owns the pixels. */
        PAINTSTRUCT ps;
        BeginPaint(oxide_window_handle, &ps);
        EndPaint(oxide_window_handle, &ps);
        return NULL;
    }
    }
    return (void *)DefWindowProc(window_handle, message, w_param, l_param);
}
bool oxide_init_window() {
Handle local_instance = GetModuleHandle(NULL);
WNDCLASSEX winClass;
MSG uMsg;
memset(&uMsg, 0, sizeof(uMsg));
winClass.lpszClassName = oxide_window_name;
winClass.cbSize = sizeof(WNDCLASSEX);
winClass.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC;
winClass.lpfnWndProc = (WNDPROC)&oxide_window_proc;
winClass.hInstance = local_instance,
winClass.hIcon = 0x0;
winClass.hIconSm = 0x0;
winClass.hCursor = LoadCursor(NULL, IDC_ARROW);
winClass.hbrBackground = (HBRUSH)CreateSolidBrush(0x00000000);
winClass.lpszMenuName = NULL;
winClass.cbClsExtra = 0;
winClass.cbWndExtra = 0;
if (!RegisterClassEx(&winClass))
return false;
oxide_window_handle = CreateWindowExA(WS_EX_TRANSPARENT | WS_EX_LAYERED | WS_EX_TOPMOST,
oxide_window_name, oxide_window_name,
WS_VISIBLE | WS_POPUP | WS_POPUP,
0,
0,
0,
0,
NULL,
NULL,
local_instance,
NULL);
if (oxide_window_handle == NULL) {
int err = GetLastError();
return false;
}
SetLayeredWindowAttributes(oxide_window_handle, 0, 255, LWA_ALPHA);
DWM_BLURBEHIND bb = {0};
HRGN hRgn = CreateRectRgn(0, 0, -1, -1);
bb.dwFlags = DWM_BB_ENABLE | DWM_BB_BLURREGION;
bb.hRgnBlur = hRgn;
bb.fEnable = TRUE;
DwmEnableBlurBehindWindow(oxide_window_handle, &bb);
//ShowWindow(window_handle, 10);
//UpdateWindow(window_handle);
return true;
}
bool oxide_set_target(const char *target_name) {
// Try and find by class name first
oxide_target_handle = FindWindowA(target_name, NULL);
if (oxide_target_handle != NULL) return true;
// Then try and find by window name
oxide_target_handle = FindWindowA(NULL, target_name);
if (oxide_target_handle == NULL) return false;
return true;
}
Oxide_WindowSize oxide_resize() {
Oxide_WindowSize new_size = {.width = 0, .height = 0};
if (oxide_target_handle == NULL || oxide_window_handle == NULL) return new_size; // we cant size the window yet
RECT window_bounds;
RECT client_bounds;
// get the inner and outer bounds of the target
GetWindowRect(oxide_target_handle, &window_bounds);
GetClientRect(oxide_target_handle, &client_bounds);
#define rect_size_zero(a) (((a.right - a.left) == 0 || (a.bottom - a.top) == 0))
if (rect_size_zero(window_bounds) || rect_size_zero(client_bounds)) return new_size;
#undef rect_size_zero
//width and height of client rect
int width = client_bounds.right - client_bounds.left;
int height = client_bounds.bottom - client_bounds.top;
int posx = window_bounds.right - width;
int posy = window_bounds.bottom - height;
SetWindowPos(oxide_window_handle, 0, posx, posy, width, height, 0);
glViewport(0, 0, width, height);
glOrtho(client_bounds.left, client_bounds.right, client_bounds.bottom, client_bounds.top, 0, 1);
new_size = (Oxide_WindowSize){.width = width, .height = height};
return new_size;
}
// Binds the overlay's GL rendering context to the calling thread so the
// frame's GL calls target the overlay window's DC.
void oxide_begin_os_frame() {
    wglMakeCurrent(oxide_dc, oxide_rc);
}
// Presents the frame: the overlay is shown and swapped only while the target
// window is focused and not minimised; otherwise it is hidden. Pending window
// messages are pumped either way.
void oxide_end_os_frame() {
    wglMakeCurrent(oxide_dc, oxide_rc);
    Handle focused        = GetForegroundWindow();
    bool   target_on_top  = !IsIconic(oxide_target_handle) && focused == oxide_target_handle;
    if (!target_on_top) {
        // target minimised or in the background: keep the overlay out of sight
        ShowWindow(oxide_window_handle, SW_HIDE);
    } else {
        ShowWindow(oxide_window_handle, SW_SHOWNORMAL);
        SwapBuffers(oxide_dc);
        oxide_resize();
    }
    // drain the message queue
    MSG queued;
    while (PeekMessage(&queued, NULL, 0, 0, PM_REMOVE)) {
        TranslateMessage(&queued);
        DispatchMessage(&queued);
    }
}
#endif

43
test/main.c Normal file
View File

@@ -0,0 +1,43 @@
#include "oxide.h"
#include <stdio.h>
#ifdef _WIN32
#include <windows.h>
// Console control handler: on Ctrl+C, shut oxide down cleanly and exit.
// All other control events are reported as handled.
// NOTE(review): SetConsoleCtrlHandler expects BOOL (WINAPI *)(DWORD); this
// bool/unsigned signature appears compatible in practice — confirm.
bool __stdcall consoleHandler(unsigned signal) {
    if (signal != CTRL_C_EVENT) return TRUE;
    printf("Shutting down...\n");
    oxide_shutdown();
    exit(0);
    return TRUE; // unreachable; exit() does not return
}
// Minimal smoke test: attach the overlay to Notepad and spin frames until
// the user presses Ctrl+C (handled by consoleHandler, which exits).
int main() {
    Oxide_InitResult res = oxide_init("Notepad");
    if (res != 0) {
        printf("Failed to load oxide %d\n", res);
        return 1;
    }
    if (SetConsoleCtrlHandler(consoleHandler, TRUE) == 0) {
        printf("\nERROR: Could not set control handler");
        return 1;
    }
    for (;;) {
        oxide_begin_frame();
        // ...
        oxide_end_frame();
    }
}
#else
// Fallback entry point for non-Windows builds: the test is Windows-only.
int main() {
    fputs("This platform is not supported in this test right now...\n", stdout);
    return 0;
}
#endif