Android Build Tools
@@ -0,0 +1,65 @@
# Please add "source /path/to/bash-autocomplete.sh" to your .bashrc to use this.

_clang_filedir()
{
  # _filedir function provided by recent versions of bash-completion package is
  # better than "compgen -f" because the former honors spaces in pathnames while
  # the latter doesn't. So we use compgen only when _filedir is not provided.
  _filedir 2> /dev/null || COMPREPLY=( $( compgen -f ) )
}

_clang()
{
  local cur prev words cword arg flags w1 w2
  # If latest bash-completion is not supported just initialize COMPREPLY and
  # initialize variables by setting manually.
  _init_completion -n 2> /dev/null
  if [[ "$?" != 0 ]]; then
    COMPREPLY=()
    cword=$COMP_CWORD
    cur="${COMP_WORDS[$cword]}"
  fi

  w1="${COMP_WORDS[$cword - 1]}"
  if [[ $cword > 1 ]]; then
    w2="${COMP_WORDS[$cword - 2]}"
  fi

  # Pass all the current command-line flags to clang, so that clang can handle
  # these internally.
  # '=' is separated differently by bash, so we have to concat them without ','
  for i in `seq 1 $cword`; do
    if [[ $i == $cword || "${COMP_WORDS[$(($i+1))]}" == '=' ]]; then
      arg="$arg${COMP_WORDS[$i]}"
    else
      arg="$arg${COMP_WORDS[$i]},"
    fi
  done

  # expand ~ to $HOME
  eval local path=${COMP_WORDS[0]}
  # Use $'\t' so that bash expands the \t for older versions of sed.
  flags=$( "$path" --autocomplete="$arg" 2>/dev/null | sed -e $'s/\t.*//' )
  # If clang is old that it does not support --autocomplete,
  # fall back to the filename completion.
  if [[ "$?" != 0 ]]; then
    _clang_filedir
    return
  fi

  # When clang does not emit any possible autocompletion, or user pushed tab after " ",
  # just autocomplete files.
  if [[ "$flags" == "$(echo -e '\n')" ]]; then
    # If -foo=<tab> and there was no possible values, autocomplete files.
    [[ "$cur" == '=' || "$cur" == -*= ]] && cur=""
    _clang_filedir
  elif [[ "$cur" == '=' ]]; then
    COMPREPLY=( $( compgen -W "$flags" -- "") )
  else
    # Bash automatically appends a space after '=' by default.
    # Disable it so that it works nicely for options in the form of -foo=bar.
    [[ "${flags: -1}" == '=' ]] && compopt -o nospace 2> /dev/null
    COMPREPLY=( $( compgen -W "$flags" -- "$cur" ) )
  fi
}
complete -F _clang clang
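The completion function above delegates all flag knowledge to the compiler via --autocomplete=. As a rough illustration of that protocol (this sketch is not part of the commit; the clang binary on PATH and the queried prefix are assumptions), clang prints one candidate per line, optionally followed by a tab and a description, which the sed call strips:

import subprocess

def query_clang_completions(prefix, clang="clang"):
    """Ask clang for completion candidates, mirroring what _clang() does."""
    out = subprocess.run([clang, "--autocomplete=" + prefix],
                         capture_output=True, text=True, check=True).stdout
    # Drop the tab-separated description, keeping only the flag or value,
    # just like the `sed -e $'s/\t.*//'` call in the script above.
    return [line.split("\t")[0] for line in out.splitlines() if line]

# Example (assumed prefix): query_clang_completions("-fno-s") lists matching flags.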
@@ -0,0 +1,969 @@
|
||||
.dark-primary-color { background: #1976D2; }
|
||||
.default-primary-color { background: #2196F3; }
|
||||
.light-primary-color { background: #BBDEFB; }
|
||||
.text-primary-color { color: #FFFFFF; }
|
||||
.accent-color { background: #00BCD4; }
|
||||
.primary-text-color { color: #212121; }
|
||||
.secondary-text-color { color: #727272; }
|
||||
.divider-color { border-color: #B6B6B6; }
|
||||
|
||||
/* for layout */
|
||||
html,
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
overflow: hidden;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
*, *:before, *:after {
|
||||
box-sizing: inherit;
|
||||
}
|
||||
|
||||
body {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
header {
|
||||
flex: 0 0 50px;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
padding-left: 30px;
|
||||
}
|
||||
|
||||
header ol {
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
header ol li {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
header form {
|
||||
display: flex;
|
||||
flex: 1;
|
||||
justify-content: flex-end;
|
||||
padding-right: 30px;
|
||||
}
|
||||
|
||||
header#header-search-sidebar {
|
||||
height: 50px;
|
||||
margin-bottom: 25px;
|
||||
}
|
||||
|
||||
footer {
|
||||
flex: 0 0 16px;
|
||||
text-align: center;
|
||||
padding: 16px 20px;
|
||||
}
|
||||
|
||||
main {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
padding: 20px;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
.sidebar-offcanvas-left {
|
||||
flex: 0 1 230px;
|
||||
overflow-y: scroll;
|
||||
padding: 20px 0 15px 30px;
|
||||
margin: 5px 20px 0 0;
|
||||
visibility: visible; /* shown by Javascript after scroll position restore */
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-button{ display: none; height: 13px; border-radius: 0px; background-color: #AAA; }
|
||||
::-webkit-scrollbar-button:hover{ background-color: #AAA; }
|
||||
::-webkit-scrollbar-thumb{ background-color: #CCC; }
|
||||
::-webkit-scrollbar-thumb:hover{ background-color: #CCC; }
|
||||
::-webkit-scrollbar{ width: 4px; }
|
||||
/* ::-webkit-overflow-scrolling: touch; */
|
||||
|
||||
.main-content::-webkit-scrollbar{ width: 8px; }
|
||||
|
||||
.main-content {
|
||||
flex: 1;
|
||||
overflow-y: scroll;
|
||||
padding: 10px 20px 0 20px;
|
||||
visibility: visible; /* shown by Javascript after scroll position restore */
|
||||
}
|
||||
|
||||
.sidebar-offcanvas-right {
|
||||
flex: 0 1 12em;
|
||||
overflow-y: scroll;
|
||||
padding: 20px 15px 15px 15px;
|
||||
margin-top: 5px;
|
||||
margin-right: 20px;
|
||||
visibility: visible; /* shown by Javascript after scroll position restore */
|
||||
}
|
||||
/* end for layout */
|
||||
|
||||
body {
|
||||
-webkit-text-size-adjust: 100%;
|
||||
overflow-x: hidden;
|
||||
font-family: Roboto, sans-serif;
|
||||
font-size: 16px;
|
||||
line-height: 1.42857143;
|
||||
color: #111111;
|
||||
background-color: #fff;
|
||||
}
|
||||
|
||||
/* some of this is to reset bootstrap */
|
||||
nav.navbar {
|
||||
background-color: inherit;
|
||||
min-height: 50px;
|
||||
border: 0;
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.hidden-xs {
|
||||
display: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
@media (min-width: 769px) {
|
||||
.hidden-l {
|
||||
display: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
nav.navbar .row {
|
||||
padding-top: 8px;
|
||||
}
|
||||
|
||||
nav .container {
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
header {
|
||||
background-color: #eeeeee;
|
||||
box-shadow: 0 3px 5px rgba(0,0,0,0.1);
|
||||
}
|
||||
|
||||
header#project-title {
|
||||
background-color: #fff;
|
||||
font-size: 200%;
|
||||
padding-top: 0.25em;
|
||||
padding-bottom: 0.25em;
|
||||
/* padding: 0em; */
|
||||
}
|
||||
|
||||
header.header-fixed nav.navbar-fixed-top {
|
||||
box-shadow: 0 3px 5px rgba(0,0,0,0.1);
|
||||
}
|
||||
|
||||
header.container-fluid {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
header .masthead {
|
||||
padding-top: 64px;
|
||||
}
|
||||
|
||||
header .contents {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
@media screen and (max-width:768px) {
|
||||
header .contents {
|
||||
padding-left: 15px;
|
||||
padding-right: 15px;
|
||||
}
|
||||
}
|
||||
|
||||
a {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.body {
|
||||
margin-top: 90px;
|
||||
}
|
||||
|
||||
section {
|
||||
margin-bottom: 36px;
|
||||
}
|
||||
|
||||
dl {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
h1,
|
||||
h2,
|
||||
h3,
|
||||
h4,
|
||||
h5,
|
||||
h6 {
|
||||
font-family: Roboto, sans-serif;
|
||||
font-weight: 400;
|
||||
margin-top: 1.5em;
|
||||
color: #111111;
|
||||
}
|
||||
|
||||
h1.title {
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 37px;
|
||||
margin-top: 0;
|
||||
margin-bottom: 0.67em;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: 28px;
|
||||
}
|
||||
|
||||
h5 {
|
||||
font-size: 16px;
|
||||
}
|
||||
|
||||
.subtitle {
|
||||
font-size: 17px;
|
||||
min-height: 1.4em;
|
||||
}
|
||||
|
||||
.title-description .subtitle {
|
||||
white-space: nowrap;
|
||||
overflow-x: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
p {
|
||||
margin-bottom: 1em;
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
a {
|
||||
color: #0175C2;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
color: #13B9FD;
|
||||
}
|
||||
|
||||
pre.prettyprint {
|
||||
font-family: 'Source Code Pro', Menlo, monospace;
|
||||
color: black;
|
||||
border-radius: 0;
|
||||
font-size: 15px;
|
||||
word-wrap: normal;
|
||||
line-height: 1.4;
|
||||
border: 0;
|
||||
margin: 16px 0 16px 0;
|
||||
padding: 8px;
|
||||
}
|
||||
|
||||
pre code {
|
||||
white-space: pre;
|
||||
word-wrap: initial;
|
||||
font-size: 100%
|
||||
}
|
||||
|
||||
.fixed {
|
||||
white-space: pre;
|
||||
}
|
||||
|
||||
pre {
|
||||
border: 1px solid #ddd;
|
||||
background-color: #eee;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
code {
|
||||
font-family: 'Source Code Pro', Menlo, monospace;
|
||||
/* overriding bootstrap */
|
||||
color: inherit;
|
||||
padding: 0.2em 0.4em;
|
||||
font-size: 85%;
|
||||
background-color: rgba(27,31,35,0.05);
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
@media(max-width: 768px) {
|
||||
nav .container {
|
||||
width: 100%
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 24px;
|
||||
}
|
||||
|
||||
pre {
|
||||
margin: 16px 0;
|
||||
}
|
||||
}
|
||||
|
||||
@media (min-width: 768px) {
|
||||
ul.subnav li {
|
||||
font-size: 17px;
|
||||
}
|
||||
}
|
||||
|
||||
header h1 {
|
||||
font-weight: 400;
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
header a,
|
||||
header p,
|
||||
header li {
|
||||
color: #111111;
|
||||
}
|
||||
|
||||
header a:hover {
|
||||
color: #0175C2;
|
||||
}
|
||||
|
||||
header h1 .kind {
|
||||
color: #555;
|
||||
}
|
||||
|
||||
dt {
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
dd {
|
||||
color: #212121;
|
||||
margin-bottom: 1em;
|
||||
margin-left: 0;
|
||||
}
|
||||
|
||||
dd.callable, dd.constant, dd.property {
|
||||
margin-bottom: 24px;
|
||||
}
|
||||
|
||||
dd p {
|
||||
overflow-x: hidden;
|
||||
text-overflow: ellipsis;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
/* indents wrapped lines */
|
||||
section.summary dt {
|
||||
margin-left: 24px;
|
||||
text-indent: -24px;
|
||||
}
|
||||
|
||||
.dl-horizontal dd {
|
||||
margin-left: initial;
|
||||
}
|
||||
|
||||
dl.dl-horizontal dt {
|
||||
font-style: normal;
|
||||
text-align: left;
|
||||
color: #727272;
|
||||
margin-right: 20px;
|
||||
width: initial;
|
||||
}
|
||||
|
||||
dt .name {
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
dl dt.callable .name {
|
||||
float: none;
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.parameter {
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.type-parameter {
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.multi-line-signature .type-parameter .parameter {
|
||||
margin-left: 0px;
|
||||
display: unset;
|
||||
}
|
||||
|
||||
.signature {
|
||||
color: #727272;
|
||||
}
|
||||
|
||||
.signature a {
|
||||
/* 50% mix of default-primary-color and primary-text-color. */
|
||||
color: #4674a2;
|
||||
}
|
||||
|
||||
.optional {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.undocumented {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.is-const {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.deprecated {
|
||||
text-decoration: line-through;
|
||||
}
|
||||
|
||||
.category.linked {
|
||||
font-weight: bold;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
/* Colors for category based on categoryOrder in dartdoc_options.config. */
|
||||
.category.cp-0 {
|
||||
background-color: #54b7c4
|
||||
}
|
||||
|
||||
.category.cp-1 {
|
||||
background-color: #54c47f
|
||||
}
|
||||
|
||||
.category.cp-2 {
|
||||
background-color: #c4c254
|
||||
}
|
||||
|
||||
.category.cp-3 {
|
||||
background-color: #c49f54
|
||||
}
|
||||
|
||||
.category.cp-4 {
|
||||
background-color: #c45465
|
||||
}
|
||||
|
||||
.category.cp-5 {
|
||||
background-color: #c454c4
|
||||
}
|
||||
|
||||
.category a {
|
||||
color: white;
|
||||
}
|
||||
|
||||
.category {
|
||||
padding: 2px 4px;
|
||||
font-size: 12px;
|
||||
border-radius: 4px;
|
||||
background-color: #999;
|
||||
text-transform: uppercase;
|
||||
color: white;
|
||||
opacity: .5;
|
||||
}
|
||||
|
||||
h1 .category {
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
.source-link {
|
||||
padding: 18px 4px;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
.source-link .material-icons {
|
||||
font-size: 18px;
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.source-link {
|
||||
padding: 7px 2px;
|
||||
font-size: 10px;
|
||||
}
|
||||
}
|
||||
|
||||
#external-links {
|
||||
float: right;
|
||||
}
|
||||
|
||||
.btn-group {
|
||||
position: relative;
|
||||
display: inline-flex;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
p.firstline {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
footer {
|
||||
color: #fff;
|
||||
background-color: #111111;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
footer p {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
footer .no-break {
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
footer .container,
|
||||
footer .container-fluid {
|
||||
padding-left: 0;
|
||||
padding-right: 0;
|
||||
}
|
||||
|
||||
footer a, footer a:hover {
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.markdown.desc {
|
||||
max-width: 700px;
|
||||
}
|
||||
|
||||
.markdown h1 {
|
||||
font-size: 24px;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.markdown h2 {
|
||||
font-size: 20px;
|
||||
margin-top: 24px;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.markdown h3 {
|
||||
font-size: 18px;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.markdown h4 {
|
||||
font-size: 16px;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.markdown li p {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.gt-separated {
|
||||
list-style: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.gt-separated li {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.gt-separated li:before {
|
||||
background-image: url("data:image/svg+xml;utf8,<svg xmlns='http://www.w3.org/2000/svg' width='16' height='16' viewBox='0 0 16 16'><path fill='%23DDDDDD' d='M6.7,4L5.7,4.9L8.8,8l-3.1,3.1L6.7,12l4-4L6.7,4z'/></svg>");
|
||||
background-position: center;
|
||||
content: "\00a0";
|
||||
margin: 0 6px 0 4px;
|
||||
padding: 0 3px 0 0;
|
||||
}
|
||||
|
||||
.gt-separated.dark li:before {
|
||||
background-image: url("data:image/svg+xml;utf8,<svg xmlns='http://www.w3.org/2000/svg' width='16' height='16' viewBox='0 0 16 16'><path fill='%23727272' d='M6.7,4L5.7,4.9L8.8,8l-3.1,3.1L6.7,12l4-4L6.7,4z'/></svg>");
|
||||
}
|
||||
|
||||
.gt-separated li:first-child:before {
|
||||
background-image: none;
|
||||
content: "";
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
/* The slug line under a declaration for things like "const", "read-only", etc. */
|
||||
.features {
|
||||
font-style: italic;
|
||||
color: #727272;
|
||||
}
|
||||
|
||||
.multi-line-signature {
|
||||
font-size: 17px;
|
||||
color: #727272;
|
||||
}
|
||||
|
||||
.multi-line-signature .parameter {
|
||||
margin-left: 24px;
|
||||
display: block;
|
||||
}
|
||||
|
||||
.breadcrumbs {
|
||||
padding: 0;
|
||||
margin: 8px 0 8px 0;
|
||||
white-space: nowrap;
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
@media screen and (min-width: 768px) {
|
||||
nav ol.breadcrumbs {
|
||||
float: left;
|
||||
}
|
||||
}
|
||||
|
||||
@media screen and (max-width: 768px) {
|
||||
.breadcrumbs {
|
||||
margin: 0 0 24px 0;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
}
|
||||
|
||||
.self-crumb {
|
||||
color: #555;
|
||||
}
|
||||
|
||||
.self-name {
|
||||
color: #555;
|
||||
display: none;
|
||||
}
|
||||
|
||||
.annotation-list {
|
||||
list-style: none;
|
||||
padding: 0;
|
||||
display: inline;
|
||||
}
|
||||
|
||||
.comma-separated {
|
||||
list-style: none;
|
||||
padding: 0;
|
||||
display: inline;
|
||||
}
|
||||
|
||||
.comma-separated li {
|
||||
display: inline;
|
||||
}
|
||||
|
||||
.comma-separated li:after {
|
||||
content: ", ";
|
||||
}
|
||||
|
||||
.comma-separated li:last-child:after {
|
||||
content: "";
|
||||
}
|
||||
|
||||
.end-with-period li:last-child:after {
|
||||
content: ".";
|
||||
}
|
||||
|
||||
.container > section:first-child {
|
||||
border: 0;
|
||||
}
|
||||
|
||||
.constructor-modifier {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
section.multi-line-signature div.parameters {
|
||||
margin-left: 24px;
|
||||
}
|
||||
|
||||
/* subnav styles */
|
||||
|
||||
ul.subnav {
|
||||
overflow: auto;
|
||||
white-space: nowrap;
|
||||
padding-left: 0;
|
||||
min-height: 25px;
|
||||
}
|
||||
|
||||
ul.subnav::-webkit-scrollbar {
|
||||
display: none;
|
||||
}
|
||||
|
||||
ul.subnav li {
|
||||
display: inline-block;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
ul.subnav li a {
|
||||
color: #111;
|
||||
}
|
||||
|
||||
ul.subnav li {
|
||||
margin-right: 24px;
|
||||
}
|
||||
|
||||
ul.subnav li:last-of-type {
|
||||
margin-right: 0;
|
||||
}
|
||||
|
||||
@media(max-width: 768px) {
|
||||
ul.subnav li {
|
||||
margin-right: 16px;
|
||||
}
|
||||
}
|
||||
|
||||
/* sidebar styles */
|
||||
|
||||
.sidebar ol {
|
||||
list-style: none;
|
||||
line-height: 22px;
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
padding: 0 0 15px 0;
|
||||
}
|
||||
|
||||
.sidebar h5 a,
|
||||
.sidebar h5 a:hover {
|
||||
color: #727272;
|
||||
}
|
||||
|
||||
.sidebar h5,
|
||||
.sidebar ol li {
|
||||
text-overflow: ellipsis;
|
||||
overflow: hidden;
|
||||
padding: 3px 0;
|
||||
}
|
||||
|
||||
.sidebar h5 {
|
||||
color: #727272;
|
||||
font-size: 18px;
|
||||
margin: 0 0 25px 0;
|
||||
padding-top: 0;
|
||||
}
|
||||
|
||||
.sidebar ol li.section-title {
|
||||
font-size: 18px;
|
||||
font-weight: normal;
|
||||
text-transform: uppercase;
|
||||
padding-top: 25px;
|
||||
}
|
||||
|
||||
.sidebar ol li.section-subtitle a {
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
.sidebar ol li.section-subtitle {
|
||||
font-weight: 400;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
.sidebar ol li.section-subitem {
|
||||
margin-left: 12px;
|
||||
}
|
||||
|
||||
.sidebar ol li:first-child {
|
||||
padding-top: 0;
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
button {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
#sidenav-left-toggle {
|
||||
display: none;
|
||||
vertical-align: text-bottom;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
/* left-nav disappears, and can transition in from the left */
|
||||
@media screen and (max-width:768px) {
|
||||
#sidenav-left-toggle {
|
||||
display: inline;
|
||||
background: no-repeat url("data:image/svg+xml;utf8,<svg xmlns='http://www.w3.org/2000/svg' width='24' height='24' viewBox='0 0 24 24'><path fill='%23111' d='M3 18h18v-2H3v2zm0-5h18v-2H3v2zm0-7v2h18V6H3z'/></svg>");
|
||||
background-position: center;
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
border: none;
|
||||
margin-right: 24px;
|
||||
}
|
||||
|
||||
#overlay-under-drawer.active {
|
||||
opacity: 0.4;
|
||||
height: 100%;
|
||||
z-index: 1999;
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
background-color: black;
|
||||
display: block;
|
||||
}
|
||||
|
||||
.sidebar-offcanvas-left {
|
||||
left: -100%;
|
||||
position: fixed;
|
||||
-webkit-transition:all .25s ease-out;
|
||||
-o-transition:all .25s ease-out;
|
||||
transition:all .25s ease-out;
|
||||
z-index: 2000;
|
||||
top: 0;
|
||||
width: 280px; /* works all the way down to an iphone 4 */
|
||||
height: 90%;
|
||||
background-color: white;
|
||||
overflow-y: scroll; /* TODO: how to hide scroll bars? */
|
||||
padding: 10px;
|
||||
margin: 10px 10px;
|
||||
box-shadow: 5px 5px 5px 5px #444444;
|
||||
visibility: hidden; /* shown by Javascript after scroll position restore */
|
||||
}
|
||||
|
||||
ol#sidebar-nav {
|
||||
font-size: 18px;
|
||||
white-space: pre-line;
|
||||
}
|
||||
|
||||
.sidebar-offcanvas-left.active {
|
||||
left: 0; /* this animates our drawer into the page */
|
||||
}
|
||||
|
||||
.self-name {
|
||||
display: inline-block;
|
||||
}
|
||||
}
|
||||
|
||||
.sidebar-offcanvas-left h5 {
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.sidebar-offcanvas-left h5:last-of-type {
|
||||
border: 0;
|
||||
margin-bottom: 25px;
|
||||
}
|
||||
|
||||
/* the right nav disappears out of view when the window shrinks */
|
||||
@media screen and (max-width: 992px) {
|
||||
.sidebar-offcanvas-right {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
#overlay-under-drawer {
|
||||
display: none;
|
||||
}
|
||||
|
||||
/* find-as-you-type search box */
|
||||
|
||||
/* override bootstrap defaults */
|
||||
.form-control {
|
||||
border-radius: 0;
|
||||
border: 0;
|
||||
}
|
||||
|
||||
@media screen and (max-width: 768px) {
|
||||
form.search {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
.typeahead,
|
||||
.tt-query,
|
||||
.tt-hint {
|
||||
width: 200px;
|
||||
height: 20px;
|
||||
padding: 2px 7px 1px 7px;
|
||||
line-height: 20px;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.typeahead {
|
||||
background-color: #fff;
|
||||
border-radius: 2px;
|
||||
}
|
||||
|
||||
.tt-query {
|
||||
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
|
||||
-moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
|
||||
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
|
||||
}
|
||||
|
||||
.tt-hint {
|
||||
color: #999
|
||||
}
|
||||
|
||||
.navbar-right .tt-menu {
|
||||
right:0;
|
||||
left: inherit !important;
|
||||
width: 422px;
|
||||
max-height: 250px;
|
||||
overflow-y: scroll;
|
||||
}
|
||||
|
||||
.tt-menu {
|
||||
font-size: 14px;
|
||||
margin: 0;
|
||||
padding: 8px 0;
|
||||
background-color: #fff;
|
||||
border: 1px solid #ccc;
|
||||
border: 1px solid rgba(0, 0, 0, 0.2);
|
||||
-webkit-box-shadow: 0 5px 10px rgba(0,0,0,.2);
|
||||
-moz-box-shadow: 0 5px 10px rgba(0,0,0,.2);
|
||||
box-shadow: 0 5px 10px rgba(0,0,0,.2);
|
||||
}
|
||||
|
||||
.tt-suggestion {
|
||||
padding: 3px 20px;
|
||||
color: #212121;
|
||||
}
|
||||
|
||||
.tt-suggestion:hover {
|
||||
cursor: pointer;
|
||||
color: #fff;
|
||||
background-color: #0097cf;
|
||||
}
|
||||
|
||||
.tt-suggestion:hover .search-from-lib {
|
||||
color: #ddd;
|
||||
}
|
||||
|
||||
.tt-suggestion.tt-cursor {
|
||||
color: #fff;
|
||||
background-color: #0097cf;
|
||||
}
|
||||
|
||||
.tt-suggestion.tt-cursor .search-from-lib {
|
||||
color: #ddd;
|
||||
}
|
||||
|
||||
.tt-suggestion p {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.search-from-lib {
|
||||
font-style: italic;
|
||||
color: gray;
|
||||
}
|
||||
|
||||
#search-box {
|
||||
background-color: #ffffff;
|
||||
}
|
||||
|
||||
.search-body {
|
||||
border: 1px solid #7f7f7f;
|
||||
max-width: 400px;
|
||||
box-shadow: 3px 3px 5px rgba(0,0,0,0.1);
|
||||
}
|
||||
|
||||
section#setter {
|
||||
border-top: 1px solid #ddd;
|
||||
padding-top: 36px;
|
||||
}
|
||||
|
||||
li.inherited a {
|
||||
opacity: 0.65;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
#instance-methods dt.inherited .name,
|
||||
#instance-properties dt.inherited .name,
|
||||
#operators dt.inherited .name {
|
||||
font-weight: 300;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
#instance-methods dt.inherited .signature,
|
||||
#instance-properties dt.inherited .signature,
|
||||
#operators dt.inherited .signature {
|
||||
font-weight: 300;
|
||||
}
|
||||
|
||||
@media print {
|
||||
.subnav, .sidebar {
|
||||
display:none;
|
||||
}
|
||||
|
||||
a[href]:after {
|
||||
content:"" !important;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
-- In this file, change "/path/to/" to the path where you installed clang-format
-- and save it to ~/Library/Application Support/BBEdit/Scripts. You can then
-- select the script from the Script menu and clang-format will format the
-- selection. Note that you can rename the menu item by renaming the script, and
-- can assign the menu item a keyboard shortcut in the BBEdit preferences, under
-- Menus & Shortcuts.
on urlToPOSIXPath(theURL)
    return do shell script "python -c \"import urllib, urlparse, sys; print urllib.unquote(urlparse.urlparse(sys.argv[1])[2])\" " & quoted form of theURL
end urlToPOSIXPath

tell application "BBEdit"
    set selectionOffset to characterOffset of selection
    set selectionLength to length of selection
    set fileURL to URL of text document 1
end tell

set filePath to urlToPOSIXPath(fileURL)
set newContents to do shell script "/path/to/clang-format -offset=" & selectionOffset & " -length=" & selectionLength & " " & quoted form of filePath

tell application "BBEdit"
    -- "set contents of text document 1 to newContents" scrolls to the bottom while
    -- replacing a selection flashes a bit but doesn't affect the scroll position.
    set currentLength to length of contents of text document 1
    select characters 1 thru currentLength of text document 1
    set text of selection to newContents
    select characters selectionOffset thru (selectionOffset + selectionLength - 1) of text document 1
end tell
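One caveat worth noting: the urlToPOSIXPath handler above shells out to a Python 2 one-liner (urlparse and urllib.unquote no longer exist under those names in Python 3). A minimal Python 3 equivalent of that conversion, offered only as a sketch, would be:

from urllib.parse import unquote, urlparse

def url_to_posix_path(file_url):
    """Convert a file:// URL, as BBEdit reports it, to a POSIX path."""
    return unquote(urlparse(file_url).path)

# url_to_posix_path("file:///Users/me/My%20Project/main.cpp")
#   -> "/Users/me/My Project/main.cpp"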
@@ -0,0 +1,193 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# ===- clang-format-diff.py - ClangFormat Diff Reformatter ----*- python -*--===#
|
||||
#
|
||||
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
||||
# See https://llvm.org/LICENSE.txt for license information.
|
||||
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
||||
#
|
||||
# ===------------------------------------------------------------------------===#
|
||||
|
||||
"""
|
||||
This script reads input from a unified diff and reformats all the changed
|
||||
lines. This is useful to reformat all the lines touched by a specific patch.
|
||||
Example usage for git/svn users:
|
||||
|
||||
git diff -U0 --no-color --relative HEAD^ | {clang_format_diff} -p1 -i
|
||||
svn diff --diff-cmd=diff -x-U0 | {clang_format_diff} -i
|
||||
|
||||
It should be noted that the filename contained in the diff is used unmodified
|
||||
to determine the source file to update. Users calling this script directly
|
||||
should be careful to ensure that the path in the diff is correct relative to the
|
||||
current working directory.
|
||||
"""
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import argparse
|
||||
import difflib
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
if sys.version_info.major >= 3:
|
||||
from io import StringIO
|
||||
else:
|
||||
from io import BytesIO as StringIO
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description=__doc__.format(clang_format_diff="%(prog)s"),
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
)
|
||||
parser.add_argument(
|
||||
"-i",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="apply edits to files instead of displaying a diff",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p",
|
||||
metavar="NUM",
|
||||
default=0,
|
||||
help="strip the smallest prefix containing P slashes",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-regex",
|
||||
metavar="PATTERN",
|
||||
default=None,
|
||||
help="custom pattern selecting file paths to reformat "
|
||||
"(case sensitive, overrides -iregex)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-iregex",
|
||||
metavar="PATTERN",
|
||||
default=r".*\.(?:cpp|cc|c\+\+|cxx|cppm|ccm|cxxm|c\+\+m|c|cl|h|hh|hpp"
|
||||
r"|hxx|m|mm|inc|js|ts|proto|protodevel|java|cs|json|s?vh?)",
|
||||
help="custom pattern selecting file paths to reformat "
|
||||
"(case insensitive, overridden by -regex)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-sort-includes",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="let clang-format sort include blocks",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-v",
|
||||
"--verbose",
|
||||
action="store_true",
|
||||
help="be more verbose, ineffective without -i",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-style",
|
||||
help="formatting style to apply (LLVM, GNU, Google, Chromium, "
|
||||
"Microsoft, Mozilla, WebKit)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-fallback-style",
|
||||
help="The name of the predefined style used as a"
|
||||
"fallback in case clang-format is invoked with"
|
||||
"-style=file, but can not find the .clang-format"
|
||||
"file to use.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-binary",
|
||||
default="clang-format",
|
||||
help="location of binary to use for clang-format",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
# Extract changed lines for each file.
|
||||
filename = None
|
||||
lines_by_file = {}
|
||||
for line in sys.stdin:
|
||||
match = re.search(r"^\+\+\+\ (.*?/){%s}(\S*)" % args.p, line)
|
||||
if match:
|
||||
filename = match.group(2)
|
||||
if filename is None:
|
||||
continue
|
||||
|
||||
if args.regex is not None:
|
||||
if not re.match("^%s$" % args.regex, filename):
|
||||
continue
|
||||
else:
|
||||
if not re.match("^%s$" % args.iregex, filename, re.IGNORECASE):
|
||||
continue
|
||||
|
||||
match = re.search(r"^@@.*\+(\d+)(?:,(\d+))?", line)
|
||||
if match:
|
||||
start_line = int(match.group(1))
|
||||
line_count = 1
|
||||
if match.group(2):
|
||||
line_count = int(match.group(2))
|
||||
# The input is something like
|
||||
#
|
||||
# @@ -1, +0,0 @@
|
||||
#
|
||||
# which means no lines were added.
|
||||
if line_count == 0:
|
||||
continue
|
||||
# Also format lines range if line_count is 0 in case of deleting
|
||||
# surrounding statements.
|
||||
end_line = start_line
|
||||
if line_count != 0:
|
||||
end_line += line_count - 1
|
||||
lines_by_file.setdefault(filename, []).extend(
|
||||
["-lines", str(start_line) + ":" + str(end_line)]
|
||||
)
|
||||
|
||||
# Reformat files containing changes in place.
|
||||
for filename, lines in lines_by_file.items():
|
||||
if args.i and args.verbose:
|
||||
print("Formatting {}".format(filename))
|
||||
command = [args.binary, filename]
|
||||
if args.i:
|
||||
command.append("-i")
|
||||
if args.sort_includes:
|
||||
command.append("-sort-includes")
|
||||
command.extend(lines)
|
||||
if args.style:
|
||||
command.extend(["-style", args.style])
|
||||
if args.fallback_style:
|
||||
command.extend(["-fallback-style", args.fallback_style])
|
||||
|
||||
try:
|
||||
p = subprocess.Popen(
|
||||
command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=None,
|
||||
stdin=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
)
|
||||
except OSError as e:
|
||||
# Give the user more context when clang-format isn't
|
||||
# found/isn't executable, etc.
|
||||
raise RuntimeError(
|
||||
'Failed to run "%s" - %s"' % (" ".join(command), e.strerror)
|
||||
)
|
||||
|
||||
stdout, stderr = p.communicate()
|
||||
if p.returncode != 0:
|
||||
sys.exit(p.returncode)
|
||||
|
||||
if not args.i:
|
||||
with open(filename) as f:
|
||||
code = f.readlines()
|
||||
formatted_code = StringIO(stdout).readlines()
|
||||
diff = difflib.unified_diff(
|
||||
code,
|
||||
formatted_code,
|
||||
filename,
|
||||
filename,
|
||||
"(before formatting)",
|
||||
"(after formatting)",
|
||||
)
|
||||
diff_string = "".join(diff)
|
||||
if len(diff_string) > 0:
|
||||
sys.stdout.write(diff_string)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
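To make the hunk parsing above concrete, here is a small, self-contained illustration (not part of the commit; the diff fragment is made up) of how the two regular expressions turn a unified-diff header into the -lines arguments passed to clang-format:

import re

sample_diff = """\
+++ b/src/foo.cpp
@@ -10,2 +12,3 @@ void foo()
"""

p = 1  # equivalent to running the script with -p1
filename = None
for line in sample_diff.splitlines():
    m = re.search(r"^\+\+\+\ (.*?/){%s}(\S*)" % p, line)
    if m:
        filename = m.group(2)                     # "src/foo.cpp"
    m = re.search(r"^@@.*\+(\d+)(?:,(\d+))?", line)
    if m and filename:
        start = int(m.group(1))                   # 12
        count = int(m.group(2)) if m.group(2) else 1
        print(["-lines", "%d:%d" % (start, start + count - 1)])  # ['-lines', '12:14']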
@@ -0,0 +1,73 @@
|
||||
# This file is a minimal clang-format sublime-integration. To install:
|
||||
# - Change 'binary' if clang-format is not on the path (see below).
|
||||
# - Put this file into your sublime Packages directory, e.g. on Linux:
|
||||
# ~/.config/sublime-text-2/Packages/User/clang-format-sublime.py
|
||||
# - Add a key binding:
|
||||
# { "keys": ["ctrl+shift+c"], "command": "clang_format" },
|
||||
#
|
||||
# With this integration you can press the bound key and clang-format will
|
||||
# format the current lines and selections for all cursor positions. The lines
|
||||
# or regions are extended to the next bigger syntactic entities.
|
||||
#
|
||||
# It operates on the current, potentially unsaved buffer and does not create
|
||||
# or save any files. To revert a formatting, just undo.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
import sublime
|
||||
import sublime_plugin
|
||||
import subprocess
|
||||
|
||||
# Change this to the full path if clang-format is not on the path.
|
||||
binary = "clang-format"
|
||||
|
||||
# Change this to format according to other formatting styles. See the output of
|
||||
# 'clang-format --help' for a list of supported styles. The default looks for
|
||||
# a '.clang-format' or '_clang-format' file to indicate the style that should be
|
||||
# used.
|
||||
style = None
|
||||
|
||||
|
||||
class ClangFormatCommand(sublime_plugin.TextCommand):
|
||||
def run(self, edit):
|
||||
encoding = self.view.encoding()
|
||||
if encoding == "Undefined":
|
||||
encoding = "utf-8"
|
||||
regions = []
|
||||
command = [binary]
|
||||
if style:
|
||||
command.extend(["-style", style])
|
||||
for region in self.view.sel():
|
||||
regions.append(region)
|
||||
region_offset = min(region.a, region.b)
|
||||
region_length = abs(region.b - region.a)
|
||||
command.extend(
|
||||
[
|
||||
"-offset",
|
||||
str(region_offset),
|
||||
"-length",
|
||||
str(region_length),
|
||||
"-assume-filename",
|
||||
str(self.view.file_name()),
|
||||
]
|
||||
)
|
||||
old_viewport_position = self.view.viewport_position()
|
||||
buf = self.view.substr(sublime.Region(0, self.view.size()))
|
||||
p = subprocess.Popen(
|
||||
command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
stdin=subprocess.PIPE,
|
||||
)
|
||||
output, error = p.communicate(buf.encode(encoding))
|
||||
if error:
|
||||
print(error)
|
||||
self.view.replace(
|
||||
edit, sublime.Region(0, self.view.size()), output.decode(encoding)
|
||||
)
|
||||
self.view.sel().clear()
|
||||
for region in regions:
|
||||
self.view.sel().add(region)
|
||||
# FIXME: Without the 10ms delay, the viewport sometimes jumps.
|
||||
sublime.set_timeout(
|
||||
lambda: self.view.set_viewport_position(old_viewport_position, False), 10
|
||||
)
|
||||
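For reference, the plugin above is essentially a thin wrapper around a single clang-format invocation: the whole buffer goes to stdin and every selection becomes an -offset/-length pair. A stand-alone sketch of that call (not part of the commit; the assumed file name and the expected output in the comment are illustrative only) looks like:

import subprocess

buf = "int   main(){return 0;}\n"
offset, length = 0, len(buf)           # one region covering the whole buffer
cmd = ["clang-format", "-offset", str(offset), "-length", str(length),
       "-assume-filename", "example.cpp"]
out = subprocess.run(cmd, input=buf, capture_output=True, text=True, check=True)
print(out.stdout)                       # e.g. "int main() { return 0; }"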
@@ -0,0 +1,220 @@
|
||||
;;; clang-format.el --- Format code using clang-format -*- lexical-binding: t; -*-
|
||||
|
||||
;; Keywords: tools, c
|
||||
;; Package-Requires: ((cl-lib "0.3"))
|
||||
;; SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
||||
|
||||
;;; Commentary:
|
||||
|
||||
;; This package allows to filter code through clang-format to fix its formatting.
|
||||
;; clang-format is a tool that formats C/C++/Obj-C code according to a set of
|
||||
;; style options, see <http://clang.llvm.org/docs/ClangFormatStyleOptions.html>.
|
||||
;; Note that clang-format 3.4 or newer is required.
|
||||
|
||||
;; clang-format.el is available via MELPA and can be installed via
|
||||
;;
|
||||
;; M-x package-install clang-format
|
||||
;;
|
||||
;; when ("melpa" . "http://melpa.org/packages/") is included in
|
||||
;; `package-archives'. Alternatively, ensure the directory of this
|
||||
;; file is in your `load-path' and add
|
||||
;;
|
||||
;; (require 'clang-format)
|
||||
;;
|
||||
;; to your .emacs configuration.
|
||||
|
||||
;; You may also want to bind `clang-format-region' to a key:
|
||||
;;
|
||||
;; (global-set-key [C-M-tab] 'clang-format-region)
|
||||
|
||||
;;; Code:
|
||||
|
||||
(require 'cl-lib)
|
||||
(require 'xml)
|
||||
|
||||
(defgroup clang-format nil
|
||||
"Format code using clang-format."
|
||||
:group 'tools)
|
||||
|
||||
(defcustom clang-format-executable
|
||||
(or (executable-find "clang-format")
|
||||
"clang-format")
|
||||
"Location of the clang-format executable.
|
||||
|
||||
A string containing the name or the full path of the executable."
|
||||
:group 'clang-format
|
||||
:type '(file :must-match t)
|
||||
:risky t)
|
||||
|
||||
(defcustom clang-format-style nil
|
||||
"Style argument to pass to clang-format.
|
||||
|
||||
By default clang-format will load the style configuration from
|
||||
a file named .clang-format located in one of the parent directories
|
||||
of the buffer."
|
||||
:group 'clang-format
|
||||
:type '(choice (string) (const nil))
|
||||
:safe #'stringp)
|
||||
(make-variable-buffer-local 'clang-format-style)
|
||||
|
||||
(defcustom clang-format-fallback-style "none"
|
||||
"Fallback style to pass to clang-format.
|
||||
|
||||
This style will be used if clang-format-style is set to \"file\"
|
||||
and no .clang-format is found in the directory of the buffer or
|
||||
one of parent directories. Set to \"none\" to disable formatting
|
||||
in such buffers."
|
||||
:group 'clang-format
|
||||
:type 'string
|
||||
:safe #'stringp)
|
||||
(make-variable-buffer-local 'clang-format-fallback-style)
|
||||
|
||||
(defun clang-format--extract (xml-node)
|
||||
"Extract replacements and cursor information from XML-NODE."
|
||||
(unless (and (listp xml-node) (eq (xml-node-name xml-node) 'replacements))
|
||||
(error "Expected <replacements> node"))
|
||||
(let ((nodes (xml-node-children xml-node))
|
||||
(incomplete-format (xml-get-attribute xml-node 'incomplete_format))
|
||||
replacements
|
||||
cursor)
|
||||
(dolist (node nodes)
|
||||
(when (listp node)
|
||||
(let* ((children (xml-node-children node))
|
||||
(text (car children)))
|
||||
(cl-case (xml-node-name node)
|
||||
(replacement
|
||||
(let* ((offset (xml-get-attribute-or-nil node 'offset))
|
||||
(length (xml-get-attribute-or-nil node 'length)))
|
||||
(when (or (null offset) (null length))
|
||||
(error "<replacement> node does not have offset and length attributes"))
|
||||
(when (cdr children)
|
||||
(error "More than one child node in <replacement> node"))
|
||||
|
||||
(setq offset (string-to-number offset))
|
||||
(setq length (string-to-number length))
|
||||
(push (list offset length text) replacements)))
|
||||
(cursor
|
||||
(setq cursor (string-to-number text)))))))
|
||||
|
||||
;; Sort by decreasing offset, length.
|
||||
(setq replacements (sort (delq nil replacements)
|
||||
(lambda (a b)
|
||||
(or (> (car a) (car b))
|
||||
(and (= (car a) (car b))
|
||||
(> (cadr a) (cadr b)))))))
|
||||
|
||||
(list replacements cursor (string= incomplete-format "true"))))
|
||||
|
||||
(defun clang-format--replace (offset length &optional text)
|
||||
"Replace the region defined by OFFSET and LENGTH with TEXT.
|
||||
OFFSET and LENGTH are measured in bytes, not characters. OFFSET
|
||||
is a zero-based file offset, assuming ‘utf-8-unix’ coding."
|
||||
(let ((start (clang-format--filepos-to-bufferpos offset 'exact 'utf-8-unix))
|
||||
(end (clang-format--filepos-to-bufferpos (+ offset length) 'exact
|
||||
'utf-8-unix)))
|
||||
(goto-char start)
|
||||
(delete-region start end)
|
||||
(when text
|
||||
(insert text))))
|
||||
|
||||
;; ‘bufferpos-to-filepos’ and ‘filepos-to-bufferpos’ are new in Emacs 25.1.
|
||||
;; Provide fallbacks for older versions.
|
||||
(defalias 'clang-format--bufferpos-to-filepos
|
||||
(if (fboundp 'bufferpos-to-filepos)
|
||||
'bufferpos-to-filepos
|
||||
(lambda (position &optional _quality _coding-system)
|
||||
(1- (position-bytes position)))))
|
||||
|
||||
(defalias 'clang-format--filepos-to-bufferpos
|
||||
(if (fboundp 'filepos-to-bufferpos)
|
||||
'filepos-to-bufferpos
|
||||
(lambda (byte &optional _quality _coding-system)
|
||||
(byte-to-position (1+ byte)))))
|
||||
|
||||
;;;###autoload
|
||||
(defun clang-format-region (start end &optional style assume-file-name)
|
||||
"Use clang-format to format the code between START and END according to STYLE.
|
||||
If called interactively uses the region or the current statement if there is
no active region. If no STYLE is given uses `clang-format-style'. Use
|
||||
ASSUME-FILE-NAME to locate a style config file, if no ASSUME-FILE-NAME is given
|
||||
uses the function `buffer-file-name'."
|
||||
(interactive
|
||||
(if (use-region-p)
|
||||
(list (region-beginning) (region-end))
|
||||
(list (point) (point))))
|
||||
|
||||
(unless style
|
||||
(setq style clang-format-style))
|
||||
|
||||
(unless assume-file-name
|
||||
(setq assume-file-name (buffer-file-name (buffer-base-buffer))))
|
||||
|
||||
(let ((file-start (clang-format--bufferpos-to-filepos start 'approximate
|
||||
'utf-8-unix))
|
||||
(file-end (clang-format--bufferpos-to-filepos end 'approximate
|
||||
'utf-8-unix))
|
||||
(cursor (clang-format--bufferpos-to-filepos (point) 'exact 'utf-8-unix))
|
||||
(temp-buffer (generate-new-buffer " *clang-format-temp*"))
|
||||
(temp-file (make-temp-file "clang-format"))
|
||||
;; Output is XML, which is always UTF-8. Input encoding should match
|
||||
;; the encoding used to convert between buffer and file positions,
|
||||
;; otherwise the offsets calculated above are off. For simplicity, we
|
||||
;; always use ‘utf-8-unix’ and ignore the buffer coding system.
|
||||
(default-process-coding-system '(utf-8-unix . utf-8-unix)))
|
||||
(unwind-protect
|
||||
(let ((status (apply #'call-process-region
|
||||
nil nil clang-format-executable
|
||||
nil `(,temp-buffer ,temp-file) nil
|
||||
`("-output-replacements-xml"
|
||||
;; Guard against a nil assume-file-name.
|
||||
;; If the clang-format option -assume-filename
|
||||
;; is given a blank string it will crash as per
|
||||
;; the following bug report
|
||||
;; https://bugs.llvm.org/show_bug.cgi?id=34667
|
||||
,@(and assume-file-name
|
||||
(list "-assume-filename" assume-file-name))
|
||||
,@(and style (list "-style" style))
|
||||
"-fallback-style" ,clang-format-fallback-style
|
||||
"-offset" ,(number-to-string file-start)
|
||||
"-length" ,(number-to-string (- file-end file-start))
|
||||
"-cursor" ,(number-to-string cursor))))
|
||||
(stderr (with-temp-buffer
|
||||
(unless (zerop (cadr (insert-file-contents temp-file)))
|
||||
(insert ": "))
|
||||
(buffer-substring-no-properties
|
||||
(point-min) (line-end-position)))))
|
||||
(cond
|
||||
((stringp status)
|
||||
(error "(clang-format killed by signal %s%s)" status stderr))
|
||||
((not (zerop status))
|
||||
(error "(clang-format failed with code %d%s)" status stderr)))
|
||||
|
||||
(cl-destructuring-bind (replacements cursor incomplete-format)
|
||||
(with-current-buffer temp-buffer
|
||||
(clang-format--extract (car (xml-parse-region))))
|
||||
(save-excursion
|
||||
(dolist (rpl replacements)
|
||||
(apply #'clang-format--replace rpl)))
|
||||
(when cursor
|
||||
(goto-char (clang-format--filepos-to-bufferpos cursor 'exact
|
||||
'utf-8-unix)))
|
||||
(if incomplete-format
|
||||
(message "(clang-format: incomplete (syntax errors)%s)" stderr)
|
||||
(message "(clang-format: success%s)" stderr))))
|
||||
(delete-file temp-file)
|
||||
(when (buffer-name temp-buffer) (kill-buffer temp-buffer)))))
|
||||
|
||||
;;;###autoload
|
||||
(defun clang-format-buffer (&optional style assume-file-name)
|
||||
"Use clang-format to format the current buffer according to STYLE.
|
||||
If no STYLE is given uses `clang-format-style'. Use ASSUME-FILE-NAME
|
||||
to locate a style config file. If no ASSUME-FILE-NAME is given uses
|
||||
the function `buffer-file-name'."
|
||||
(interactive)
|
||||
(clang-format-region (point-min) (point-max) style assume-file-name))
|
||||
|
||||
;;;###autoload
|
||||
(defalias 'clang-format 'clang-format-region)
|
||||
|
||||
(provide 'clang-format)
|
||||
;;; clang-format.el ends here
|
||||
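The parsing in clang-format--extract above targets the XML that clang-format emits for -output-replacements-xml (plus a <cursor> element when -cursor is passed): each <replacement> carries byte offset and length attributes and the replacement text as its body, and the edits are applied in decreasing offset order. A rough Python rendering of the same parse, using a made-up sample document, is:

import xml.etree.ElementTree as ET

sample = """<?xml version='1.0'?>
<replacements xml:space='preserve' incomplete_format='false'>
<cursor>42</cursor>
<replacement offset='12' length='3'>
  </replacement>
</replacements>"""

root = ET.fromstring(sample)
cursor = int(root.findtext("cursor"))
replacements = [(int(r.get("offset")), int(r.get("length")), r.text or "")
                for r in root.findall("replacement")]
# Apply back to front so earlier offsets are not invalidated by later edits.
for offset, length, text in sorted(replacements, reverse=True):
    print("replace bytes [%d, %d) with %r" % (offset, offset + length, text))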
@@ -0,0 +1,168 @@
|
||||
# This file is a minimal clang-format vim-integration. To install:
|
||||
# - Change 'binary' if clang-format is not on the path (see below).
|
||||
# - Add to your .vimrc:
|
||||
#
|
||||
# if has('python')
|
||||
# map <C-I> :pyf <path-to-this-file>/clang-format.py<cr>
|
||||
# imap <C-I> <c-o>:pyf <path-to-this-file>/clang-format.py<cr>
|
||||
# elseif has('python3')
|
||||
# map <C-I> :py3f <path-to-this-file>/clang-format.py<cr>
|
||||
# imap <C-I> <c-o>:py3f <path-to-this-file>/clang-format.py<cr>
|
||||
# endif
|
||||
#
|
||||
# The if-elseif-endif conditional should pick either the python3 or python2
|
||||
# integration depending on your vim setup.
|
||||
#
|
||||
# The first mapping enables clang-format for NORMAL and VISUAL mode, the second
|
||||
# mapping adds support for INSERT mode. Change "C-I" to another binding if you
|
||||
# need clang-format on a different key (C-I stands for Ctrl+i).
|
||||
#
|
||||
# With this integration you can press the bound key and clang-format will
|
||||
# format the current line in NORMAL and INSERT mode or the selected region in
|
||||
# VISUAL mode. The line or region is extended to the next bigger syntactic
|
||||
# entity.
|
||||
#
|
||||
# You can also pass in the variable "l:lines" to choose the range for
|
||||
# formatting. This variable can either contain "<start line>:<end line>" or
|
||||
# "all" to format the full file. So, to format the full file, write a function
|
||||
# like:
|
||||
# :function FormatFile()
|
||||
# : let l:lines="all"
|
||||
# : if has('python')
|
||||
# : pyf <path-to-this-file>/clang-format.py
|
||||
# : elseif has('python3')
|
||||
# : py3f <path-to-this-file>/clang-format.py
|
||||
# : endif
|
||||
# :endfunction
|
||||
#
|
||||
# It operates on the current, potentially unsaved buffer and does not create
|
||||
# or save any files. To revert a formatting, just undo.
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import difflib
|
||||
import json
|
||||
import os.path
|
||||
import platform
|
||||
import subprocess
|
||||
import sys
|
||||
import vim
|
||||
|
||||
# set g:clang_format_path to the path to clang-format if it is not on the path
|
||||
# Change this to the full path if clang-format is not on the path.
|
||||
binary = "clang-format"
|
||||
if vim.eval('exists("g:clang_format_path")') == "1":
|
||||
binary = vim.eval("g:clang_format_path")
|
||||
|
||||
# Change this to format according to other formatting styles. See the output of
|
||||
# 'clang-format --help' for a list of supported styles. The default looks for
|
||||
# a '.clang-format' or '_clang-format' file to indicate the style that should be
|
||||
# used.
|
||||
style = None
|
||||
fallback_style = None
|
||||
if vim.eval('exists("g:clang_format_fallback_style")') == "1":
|
||||
fallback_style = vim.eval("g:clang_format_fallback_style")
|
||||
|
||||
|
||||
def get_buffer(encoding):
|
||||
if platform.python_version_tuple()[0] == "3":
|
||||
return vim.current.buffer
|
||||
return [line.decode(encoding) for line in vim.current.buffer]
|
||||
|
||||
|
||||
def main():
|
||||
# Get the current text.
|
||||
encoding = vim.eval("&encoding")
|
||||
buf = get_buffer(encoding)
|
||||
# Join the buffer into a single string with a terminating newline
|
||||
text = ("\n".join(buf) + "\n").encode(encoding)
|
||||
|
||||
# Determine range to format.
|
||||
if vim.eval('exists("l:lines")') == "1":
|
||||
lines = ["-lines", vim.eval("l:lines")]
|
||||
elif vim.eval('exists("l:formatdiff")') == "1" and os.path.exists(
|
||||
vim.current.buffer.name
|
||||
):
|
||||
with open(vim.current.buffer.name, "r") as f:
|
||||
ondisk = f.read().splitlines()
|
||||
sequence = difflib.SequenceMatcher(None, ondisk, vim.current.buffer)
|
||||
lines = []
|
||||
for op in reversed(sequence.get_opcodes()):
|
||||
if op[0] not in ["equal", "delete"]:
|
||||
lines += ["-lines", "%s:%s" % (op[3] + 1, op[4])]
|
||||
if lines == []:
|
||||
return
|
||||
else:
|
||||
lines = [
|
||||
"-lines",
|
||||
"%s:%s" % (vim.current.range.start + 1, vim.current.range.end + 1),
|
||||
]
|
||||
|
||||
# Convert cursor (line, col) to bytes.
|
||||
# Don't use line2byte: https://github.com/vim/vim/issues/5930
|
||||
_, cursor_line, cursor_col, _ = vim.eval('getpos(".")') # 1-based
|
||||
cursor_byte = 0
|
||||
for line in text.split(b"\n")[: int(cursor_line) - 1]:
|
||||
cursor_byte += len(line) + 1
|
||||
cursor_byte += int(cursor_col) - 1
|
||||
if cursor_byte < 0:
|
||||
print("Couldn't determine cursor position. Is your file empty?")
|
||||
return
|
||||
|
||||
# Avoid flashing an ugly, ugly cmd prompt on Windows when invoking clang-format.
|
||||
startupinfo = None
|
||||
if sys.platform.startswith("win32"):
|
||||
startupinfo = subprocess.STARTUPINFO()
|
||||
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
|
||||
startupinfo.wShowWindow = subprocess.SW_HIDE
|
||||
|
||||
# Call formatter.
|
||||
command = [binary, "-cursor", str(cursor_byte)]
|
||||
if lines != ["-lines", "all"]:
|
||||
command += lines
|
||||
if style:
|
||||
command.extend(["-style", style])
|
||||
if fallback_style:
|
||||
command.extend(["-fallback-style", fallback_style])
|
||||
if vim.current.buffer.name:
|
||||
command.extend(["-assume-filename", vim.current.buffer.name])
|
||||
p = subprocess.Popen(
|
||||
command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
stdin=subprocess.PIPE,
|
||||
startupinfo=startupinfo,
|
||||
)
|
||||
stdout, stderr = p.communicate(input=text)
|
||||
|
||||
# If successful, replace buffer contents.
|
||||
if stderr:
|
||||
print(stderr)
|
||||
|
||||
if not stdout:
|
||||
print(
|
||||
"No output from clang-format (crashed?).\n"
|
||||
"Please report to bugs.llvm.org."
|
||||
)
|
||||
else:
|
||||
header, content = stdout.split(b"\n", 1)
|
||||
header = json.loads(header.decode("utf-8"))
|
||||
# Strip off the trailing newline (added above).
|
||||
# This maintains trailing empty lines present in the buffer if
|
||||
# the -lines specification requests them to remain unchanged.
|
||||
lines = content.decode(encoding).split("\n")[:-1]
|
||||
sequence = difflib.SequenceMatcher(None, buf, lines)
|
||||
for op in reversed(sequence.get_opcodes()):
|
||||
if op[0] != "equal":
|
||||
vim.current.buffer[op[1] : op[2]] = lines[op[3] : op[4]]
|
||||
if header.get("IncompleteFormat"):
|
||||
print("clang-format: incomplete (syntax errors)")
|
||||
# Convert cursor bytes to (line, col)
|
||||
# Don't use goto: https://github.com/vim/vim/issues/5930
|
||||
cursor_byte = int(header["Cursor"])
|
||||
prefix = content[0:cursor_byte]
|
||||
cursor_line = 1 + prefix.count(b"\n")
|
||||
cursor_column = 1 + len(prefix.rsplit(b"\n", 1)[-1])
|
||||
vim.command("call cursor(%d, %d)" % (cursor_line, cursor_column))
|
||||
|
||||
|
||||
main()
|
||||
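The stdout.split(b"\n", 1) step above relies on clang-format's -cursor output format: the first line of stdout is a JSON header and the remainder is the reformatted source. A tiny stand-alone sketch of that split (not part of the commit; the sample bytes are invented):

import json

stdout = b'{ "Cursor": 7, "IncompleteFormat": false }\nint main() {}\n'

header, content = stdout.split(b"\n", 1)
info = json.loads(header.decode("utf-8"))
print(info["Cursor"])            # 7, later mapped back to a (line, col) position
print(content.decode("utf-8"))   # the reformatted buffer contents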
@@ -0,0 +1,460 @@
|
||||
;;; clang-include-fixer.el --- Emacs integration of the clang include fixer -*- lexical-binding: t; -*-
|
||||
|
||||
;; Keywords: tools, c
|
||||
;; Package-Requires: ((cl-lib "0.5") (json "1.2") (let-alist "1.0.4"))
|
||||
|
||||
;;; Commentary:
|
||||
|
||||
;; This package allows Emacs users to invoke the 'clang-include-fixer' within
|
||||
;; Emacs. 'clang-include-fixer' provides an automated way of adding #include
|
||||
;; directives for missing symbols in one translation unit, see
|
||||
;; <http://clang.llvm.org/extra/clang-include-fixer.html>.
|
||||
|
||||
;;; Code:
|
||||
|
||||
(require 'cl-lib)
|
||||
(require 'json)
|
||||
(require 'let-alist)
|
||||
|
||||
(defgroup clang-include-fixer nil
|
||||
"Clang-based include fixer."
|
||||
:group 'tools)
|
||||
|
||||
(defvar clang-include-fixer-add-include-hook nil
|
||||
"A hook that will be called for every added include.
|
||||
The first argument is the filename of the include, the second argument is
|
||||
non-nil if the include is a system-header.")
|
||||
|
||||
(defcustom clang-include-fixer-executable
|
||||
"clang-include-fixer"
|
||||
"Location of the clang-include-fixer executable.
|
||||
|
||||
A string containing the name or the full path of the executable."
|
||||
:group 'clang-include-fixer
|
||||
:type '(file :must-match t)
|
||||
:risky t)
|
||||
|
||||
(defcustom clang-include-fixer-input-format
|
||||
'yaml
|
||||
"Input format for clang-include-fixer.
|
||||
This string is passed as -db argument to
|
||||
`clang-include-fixer-executable'."
|
||||
:group 'clang-include-fixer
|
||||
:type '(radio
|
||||
(const :tag "Hard-coded mapping" :fixed)
|
||||
(const :tag "YAML" yaml)
|
||||
(symbol :tag "Other"))
|
||||
:risky t)
|
||||
|
||||
(defcustom clang-include-fixer-init-string
|
||||
""
|
||||
"Database initialization string for clang-include-fixer.
|
||||
This string is passed as -input argument to
|
||||
`clang-include-fixer-executable'."
|
||||
:group 'clang-include-fixer
|
||||
:type 'string
|
||||
:risky t)
|
||||
|
||||
(defface clang-include-fixer-highlight '((t :background "green"))
|
||||
"Used for highlighting the symbol for which a header file is being added.")
|
||||
|
||||
;;;###autoload
|
||||
(defun clang-include-fixer ()
|
||||
"Invoke the Include Fixer to insert missing C++ headers."
|
||||
(interactive)
|
||||
(message (concat "Calling the include fixer. "
|
||||
"This might take some seconds. Please wait."))
|
||||
(clang-include-fixer--start #'clang-include-fixer--add-header
|
||||
"-output-headers"))
|
||||
|
||||
;;;###autoload
|
||||
(defun clang-include-fixer-at-point ()
|
||||
"Invoke the Clang include fixer for the symbol at point."
|
||||
(interactive)
|
||||
(let ((symbol (clang-include-fixer--symbol-at-point)))
|
||||
(unless symbol
|
||||
(user-error "No symbol at current location"))
|
||||
(clang-include-fixer-from-symbol symbol)))
|
||||
|
||||
;;;###autoload
|
||||
(defun clang-include-fixer-from-symbol (symbol)
|
||||
"Invoke the Clang include fixer for the SYMBOL.
|
||||
When called interactively, prompts the user for a symbol."
|
||||
(interactive
|
||||
(list (read-string "Symbol: " (clang-include-fixer--symbol-at-point))))
|
||||
(clang-include-fixer--start #'clang-include-fixer--add-header
|
||||
(format "-query-symbol=%s" symbol)))
|
||||
|
||||
(defun clang-include-fixer--start (callback &rest args)
|
||||
"Asynchronously start clang-include-fixer with parameters ARGS.
|
||||
The current file name is passed after ARGS as last argument. If
|
||||
the call was successful the returned result is stored in a
|
||||
temporary buffer, and CALLBACK is called with the temporary
|
||||
buffer as only argument."
|
||||
(unless buffer-file-name
|
||||
(user-error "clang-include-fixer works only in buffers that visit a file"))
|
||||
(let ((process (if (and (fboundp 'make-process)
|
||||
;; ‘make-process’ doesn’t support remote files
|
||||
;; (https://debbugs.gnu.org/cgi/bugreport.cgi?bug=28691).
|
||||
(not (find-file-name-handler default-directory
|
||||
'start-file-process)))
|
||||
;; Prefer using ‘make-process’ if possible, because
|
||||
;; ‘start-process’ doesn’t allow us to separate the
|
||||
;; standard error from the output.
|
||||
(clang-include-fixer--make-process callback args)
|
||||
(clang-include-fixer--start-process callback args))))
|
||||
(save-restriction
|
||||
(widen)
|
||||
(process-send-region process (point-min) (point-max)))
|
||||
(process-send-eof process))
|
||||
nil)
|
||||
|
||||
(defun clang-include-fixer--make-process (callback args)
|
||||
"Start a new clang-include-fixer process using `make-process'.
|
||||
CALLBACK is called after the process finishes successfully; it is
|
||||
called with a single argument, the buffer where standard output
|
||||
has been inserted. ARGS is a list of additional command line
|
||||
arguments. Return the new process object."
|
||||
(let ((stdin (current-buffer))
|
||||
(stdout (generate-new-buffer "*clang-include-fixer output*"))
|
||||
(stderr (generate-new-buffer "*clang-include-fixer errors*")))
|
||||
(make-process :name "clang-include-fixer"
|
||||
:buffer stdout
|
||||
:command (clang-include-fixer--command args)
|
||||
:coding 'utf-8-unix
|
||||
:noquery t
|
||||
:connection-type 'pipe
|
||||
:sentinel (clang-include-fixer--sentinel stdin stdout stderr
|
||||
callback)
|
||||
:stderr stderr)))
|
||||
|
||||
(defun clang-include-fixer--start-process (callback args)
|
||||
"Start a new clang-include-fixer process using `start-file-process'.
|
||||
CALLBACK is called after the process finishes successfully; it is
|
||||
called with a single argument, the buffer where standard output
|
||||
has been inserted. ARGS is a list of additional command line
|
||||
arguments. Return the new process object."
|
||||
(let* ((stdin (current-buffer))
|
||||
(stdout (generate-new-buffer "*clang-include-fixer output*"))
|
||||
(process-connection-type nil)
|
||||
(process (apply #'start-file-process "clang-include-fixer" stdout
|
||||
(clang-include-fixer--command args))))
|
||||
(set-process-coding-system process 'utf-8-unix 'utf-8-unix)
|
||||
(set-process-query-on-exit-flag process nil)
|
||||
(set-process-sentinel process
|
||||
(clang-include-fixer--sentinel stdin stdout nil
|
||||
callback))
|
||||
process))
|
||||
|
||||
(defun clang-include-fixer--command (args)
|
||||
"Return the clang-include-fixer command line.
|
||||
Returns a list; the first element is the binary to
|
||||
execute (`clang-include-fixer-executable'), and the remaining
|
||||
elements are the command line arguments. Adds proper arguments
|
||||
for `clang-include-fixer-input-format' and
|
||||
`clang-include-fixer-init-string'. Appends the current buffer's
|
||||
file name; prepends ARGS directly in front of it."
|
||||
(cl-check-type args list)
|
||||
`(,clang-include-fixer-executable
|
||||
,(format "-db=%s" clang-include-fixer-input-format)
|
||||
,(format "-input=%s" clang-include-fixer-init-string)
|
||||
"-stdin"
|
||||
,@args
|
||||
,(clang-include-fixer--file-local-name buffer-file-name)))
|
||||
|
||||
(defun clang-include-fixer--sentinel (stdin stdout stderr callback)
|
||||
"Return a process sentinel for clang-include-fixer processes.
|
||||
STDIN, STDOUT, and STDERR are buffers for the standard streams;
|
||||
only STDERR may be nil. CALLBACK is called in the case of
|
||||
success; it is called with a single argument, STDOUT. On
|
||||
failure, a buffer containing the error output is displayed."
|
||||
(cl-check-type stdin buffer)
|
||||
(cl-check-type stdout buffer)
|
||||
(cl-check-type stderr (or null buffer))
|
||||
(cl-check-type callback function)
|
||||
(lambda (process event)
|
||||
(cl-check-type process process)
|
||||
(cl-check-type event string)
|
||||
(unwind-protect
|
||||
(if (string-equal event "finished\n")
|
||||
(progn
|
||||
(when stderr (kill-buffer stderr))
|
||||
(with-current-buffer stdin
|
||||
(funcall callback stdout))
|
||||
(kill-buffer stdout))
|
||||
(when stderr (kill-buffer stdout))
|
||||
(message "clang-include-fixer failed")
|
||||
(with-current-buffer (or stderr stdout)
|
||||
(insert "\nProcess " (process-name process)
|
||||
?\s event))
|
||||
(display-buffer (or stderr stdout))))
|
||||
nil))
|
||||
|
||||
(defun clang-include-fixer--replace-buffer (stdout)
|
||||
"Replace current buffer by content of STDOUT."
|
||||
(cl-check-type stdout buffer)
|
||||
(barf-if-buffer-read-only)
|
||||
(cond ((fboundp 'replace-buffer-contents) (replace-buffer-contents stdout))
|
||||
((clang-include-fixer--insert-line stdout (current-buffer)))
|
||||
(t (erase-buffer) (insert-buffer-substring stdout)))
|
||||
(message "Fix applied")
|
||||
nil)
|
||||
|
||||
(defun clang-include-fixer--insert-line (from to)
|
||||
"Insert a single missing line from the buffer FROM into TO.
|
||||
FROM and TO must be buffers. If the contents of FROM and TO are
|
||||
equal, do nothing and return non-nil. If FROM contains a single
|
||||
line missing from TO, insert that line into TO so that the buffer
|
||||
contents are equal and return non-nil. Otherwise, do nothing and
|
||||
return nil. Buffer restrictions are ignored."
|
||||
(cl-check-type from buffer)
|
||||
(cl-check-type to buffer)
|
||||
(with-current-buffer from
|
||||
(save-excursion
|
||||
(save-restriction
|
||||
(widen)
|
||||
(with-current-buffer to
|
||||
(save-excursion
|
||||
(save-restriction
|
||||
(widen)
|
||||
;; Search for the first buffer difference.
|
||||
(let ((chars (abs (compare-buffer-substrings to nil nil from nil nil))))
|
||||
(if (zerop chars)
|
||||
;; Buffer contents are equal, nothing to do.
|
||||
t
|
||||
(goto-char chars)
|
||||
;; We might have ended up in the middle of a line if the
|
||||
;; current line partially matches. In this case we would
|
||||
;; have to insert more than a line. Move to the beginning of
|
||||
;; the line to avoid this situation.
|
||||
(beginning-of-line)
|
||||
(with-current-buffer from
|
||||
(goto-char chars)
|
||||
(beginning-of-line)
|
||||
(let ((from-begin (point))
|
||||
(from-end (progn (forward-line) (point)))
|
||||
(to-point (with-current-buffer to (point))))
|
||||
;; Search for another buffer difference after the line in
|
||||
;; question. If there is none, we can proceed.
|
||||
(when (zerop (compare-buffer-substrings from from-end nil
|
||||
to to-point nil))
|
||||
(with-current-buffer to
|
||||
(insert-buffer-substring from from-begin from-end))
|
||||
t))))))))))))
|
||||
|
||||
(defun clang-include-fixer--add-header (stdout)
|
||||
"Analyse the result of clang-include-fixer stored in STDOUT.
|
||||
Add a missing header if there is any. If there are multiple
|
||||
possible headers the user can select one of them to be included.
|
||||
Temporarily highlight the affected symbols. Asynchronously call
|
||||
clang-include-fixer to insert the selected header."
|
||||
(cl-check-type stdout buffer-live)
|
||||
(let ((context (clang-include-fixer--parse-json stdout)))
|
||||
(let-alist context
|
||||
(cond
|
||||
((null .QuerySymbolInfos)
|
||||
(message "The file is fine, no need to add a header."))
|
||||
((null .HeaderInfos)
|
||||
(message "Couldn't find header for '%s'"
|
||||
(let-alist (car .QuerySymbolInfos) .RawIdentifier)))
|
||||
(t
|
||||
;; Users may C-g in prompts, make sure the process sentinel
|
||||
;; behaves correctly.
|
||||
(with-local-quit
|
||||
;; Replace the HeaderInfos list by a single header selected by
|
||||
;; the user.
|
||||
(clang-include-fixer--select-header context)
|
||||
;; Call clang-include-fixer again to insert the selected header.
|
||||
(clang-include-fixer--start
|
||||
(let ((old-tick (buffer-chars-modified-tick)))
|
||||
(lambda (stdout)
|
||||
(when (/= old-tick (buffer-chars-modified-tick))
|
||||
;; Replacing the buffer now would undo the user’s changes.
|
||||
(user-error (concat "The buffer has been changed "
|
||||
"before the header could be inserted")))
|
||||
(clang-include-fixer--replace-buffer stdout)
|
||||
(let-alist context
|
||||
(let-alist (car .HeaderInfos)
|
||||
(with-local-quit
|
||||
(run-hook-with-args 'clang-include-fixer-add-include-hook
|
||||
(substring .Header 1 -1)
|
||||
(string= (substring .Header 0 1) "<")))))))
|
||||
(format "-insert-header=%s"
|
||||
(clang-include-fixer--encode-json context))))))))
|
||||
nil)
|
||||
|
||||
(defun clang-include-fixer--select-header (context)
|
||||
"Prompt the user for a header if necessary.
|
||||
CONTEXT must be a clang-include-fixer context object in
|
||||
association list format. If it contains more than one HeaderInfo
|
||||
element, prompt the user to select one of the headers. CONTEXT
|
||||
is modified to include only the selected element."
|
||||
(cl-check-type context cons)
|
||||
(let-alist context
|
||||
(if (cdr .HeaderInfos)
|
||||
(clang-include-fixer--prompt-for-header context)
|
||||
(message "Only one include is missing: %s"
|
||||
(let-alist (car .HeaderInfos) .Header))))
|
||||
nil)
|
||||
|
||||
(defvar clang-include-fixer--history nil
|
||||
"History for `clang-include-fixer--prompt-for-header'.")
|
||||
|
||||
(defun clang-include-fixer--prompt-for-header (context)
|
||||
"Prompt the user for a single header.
|
||||
The choices are taken from the HeaderInfo elements in CONTEXT.
|
||||
They are replaced by the single element selected by the user."
|
||||
(let-alist context
|
||||
(let ((symbol (clang-include-fixer--symbol-name .QuerySymbolInfos))
|
||||
;; Add temporary highlighting so that the user knows which
|
||||
;; symbols the current session is about.
|
||||
(overlays (remove nil
|
||||
(mapcar #'clang-include-fixer--highlight .QuerySymbolInfos))))
|
||||
(unwind-protect
|
||||
(save-excursion
|
||||
;; While prompting, go to the closest overlay so that the user sees
|
||||
;; some context.
|
||||
(when overlays
|
||||
(goto-char (clang-include-fixer--closest-overlay overlays)))
|
||||
(cl-flet ((header (info) (let-alist info .Header)))
|
||||
;; The header-infos is already sorted by clang-include-fixer.
|
||||
(let* ((headers (mapcar #'header .HeaderInfos))
|
||||
(header (completing-read
|
||||
(clang-include-fixer--format-message
|
||||
"Select include for '%s': " symbol)
|
||||
headers nil :require-match nil
|
||||
'clang-include-fixer--history
|
||||
;; Specify a default to prevent the behavior
|
||||
;; described in
|
||||
;; https://github.com/DarwinAwardWinner/ido-completing-read-plus#why-does-ret-sometimes-not-select-the-first-completion-on-the-list--why-is-there-an-empty-entry-at-the-beginning-of-the-completion-list--what-happened-to-old-style-default-selection.
|
||||
(car headers)))
|
||||
(info (cl-find header .HeaderInfos :key #'header :test #'string=)))
|
||||
(unless info (user-error "No header selected"))
|
||||
(setcar .HeaderInfos info)
|
||||
(setcdr .HeaderInfos nil))))
|
||||
(mapc #'delete-overlay overlays)))))
|
||||
|
||||
(defun clang-include-fixer--symbol-name (symbol-infos)
|
||||
"Return the unique symbol name in SYMBOL-INFOS.
|
||||
Raise a signal if the symbol name is not unique."
|
||||
(let ((symbols (delete-dups (mapcar (lambda (info)
|
||||
(let-alist info .RawIdentifier))
|
||||
symbol-infos))))
|
||||
(when (cdr symbols)
|
||||
(error "Multiple symbols %s returned" symbols))
|
||||
(car symbols)))
|
||||
|
||||
(defun clang-include-fixer--highlight (symbol-info)
|
||||
"Add an overlay to highlight SYMBOL-INFO, if it points to a non-empty range.
|
||||
Return the overlay object, or nil."
|
||||
(let-alist symbol-info
|
||||
(unless (zerop .Range.Length)
|
||||
(let ((overlay (make-overlay
|
||||
(clang-include-fixer--filepos-to-bufferpos
|
||||
.Range.Offset 'approximate)
|
||||
(clang-include-fixer--filepos-to-bufferpos
|
||||
(+ .Range.Offset .Range.Length) 'approximate))))
|
||||
(overlay-put overlay 'face 'clang-include-fixer-highlight)
|
||||
overlay))))
|
||||
|
||||
(defun clang-include-fixer--closest-overlay (overlays)
|
||||
"Return the start of the overlay in OVERLAYS that is closest to point."
|
||||
(cl-check-type overlays cons)
|
||||
(let ((point (point))
|
||||
acc)
|
||||
(dolist (overlay overlays acc)
|
||||
(let ((start (overlay-start overlay)))
|
||||
(when (or (null acc) (< (abs (- point start)) (abs (- point acc))))
|
||||
(setq acc start))))))
|
||||
|
||||
(defun clang-include-fixer--parse-json (buffer)
|
||||
"Parse a JSON response from clang-include-fixer in BUFFER.
|
||||
Return the JSON object as an association list."
|
||||
(with-current-buffer buffer
|
||||
(save-excursion
|
||||
(goto-char (point-min))
|
||||
(let ((json-object-type 'alist)
|
||||
(json-array-type 'list)
|
||||
(json-key-type 'symbol)
|
||||
(json-false :json-false)
|
||||
(json-null nil)
|
||||
(json-pre-element-read-function nil)
|
||||
(json-post-element-read-function nil))
|
||||
(json-read)))))
|
||||
|
||||
(defun clang-include-fixer--encode-json (object)
|
||||
"Return the JSON representation of OBJECT as a string."
|
||||
(let ((json-encoding-separator ",")
|
||||
(json-encoding-default-indentation " ")
|
||||
(json-encoding-pretty-print nil)
|
||||
(json-encoding-lisp-style-closings nil)
|
||||
(json-encoding-object-sort-predicate nil))
|
||||
(json-encode object)))
|
||||
|
||||
(defun clang-include-fixer--symbol-at-point ()
|
||||
"Return the qualified symbol at point.
|
||||
If there is no symbol at point, return nil."
|
||||
;; Let ‘bounds-of-thing-at-point’ do the hard work and deal with edge
|
||||
;; cases.
|
||||
(let ((bounds (bounds-of-thing-at-point 'symbol)))
|
||||
(when bounds
|
||||
(let ((beg (car bounds))
|
||||
(end (cdr bounds)))
|
||||
(save-excursion
|
||||
;; Extend the symbol range to the left. Skip over namespace
|
||||
;; delimiters and parent namespace names.
|
||||
(goto-char beg)
|
||||
(while (and (clang-include-fixer--skip-double-colon-backward)
|
||||
(skip-syntax-backward "w_")))
|
||||
;; Skip over one more namespace delimiter, for absolute names.
|
||||
(clang-include-fixer--skip-double-colon-backward)
|
||||
(setq beg (point))
|
||||
;; Extend the symbol range to the right. Skip over namespace
|
||||
;; delimiters and child namespace names.
|
||||
(goto-char end)
|
||||
(while (and (clang-include-fixer--skip-double-colon-forward)
|
||||
(skip-syntax-forward "w_")))
|
||||
(setq end (point)))
|
||||
(buffer-substring-no-properties beg end)))))
|
||||
|
||||
(defun clang-include-fixer--skip-double-colon-forward ()
|
||||
"Skip a double colon.
|
||||
When the next two characters are '::', skip them and return
|
||||
non-nil. Otherwise return nil."
|
||||
(let ((end (+ (point) 2)))
|
||||
(when (and (<= end (point-max))
|
||||
(string-equal (buffer-substring-no-properties (point) end) "::"))
|
||||
(goto-char end)
|
||||
t)))
|
||||
|
||||
(defun clang-include-fixer--skip-double-colon-backward ()
|
||||
"Skip a double colon.
|
||||
When the previous two characters are '::', skip them and return
|
||||
non-nil. Otherwise return nil."
|
||||
(let ((beg (- (point) 2)))
|
||||
(when (and (>= beg (point-min))
|
||||
(string-equal (buffer-substring-no-properties beg (point)) "::"))
|
||||
(goto-char beg)
|
||||
t)))
|
||||
|
||||
;; ‘filepos-to-bufferpos’ is new in Emacs 25.1. Provide a fallback for older
|
||||
;; versions.
|
||||
(defalias 'clang-include-fixer--filepos-to-bufferpos
|
||||
(if (fboundp 'filepos-to-bufferpos)
|
||||
'filepos-to-bufferpos
|
||||
(lambda (byte &optional _quality _coding-system)
|
||||
(byte-to-position (1+ byte)))))
|
||||
|
||||
;; ‘format-message’ is new in Emacs 25.1. Provide a fallback for older
|
||||
;; versions.
|
||||
(defalias 'clang-include-fixer--format-message
|
||||
(if (fboundp 'format-message) 'format-message 'format))
|
||||
|
||||
;; ‘file-local-name’ is new in Emacs 26.1. Provide a fallback for older
|
||||
;; versions.
|
||||
(defalias 'clang-include-fixer--file-local-name
|
||||
(if (fboundp 'file-local-name) #'file-local-name
|
||||
(lambda (file) (or (file-remote-p file 'localname) file))))
|
||||
|
||||
(provide 'clang-include-fixer)
|
||||
;;; clang-include-fixer.el ends here
|
||||
@ -0,0 +1,243 @@
|
||||
# This file is a minimal clang-include-fixer vim-integration. To install:
|
||||
# - Change 'binary' if clang-include-fixer is not on the path (see below).
|
||||
# - Add to your .vimrc:
|
||||
#
|
||||
# noremap <leader>cf :pyf path/to/llvm/source/tools/clang/tools/extra/clang-include-fixer/tool/clang-include-fixer.py<cr>
|
||||
#
|
||||
# This enables clang-include-fixer for NORMAL and VISUAL mode. Change
|
||||
# "<leader>cf" to another binding if you need clang-include-fixer on a
|
||||
# different key.
|
||||
#
|
||||
# To set up clang-include-fixer, see
|
||||
# http://clang.llvm.org/extra/clang-include-fixer.html
|
||||
#
|
||||
# With this integration you can press the bound key and clang-include-fixer will
|
||||
# be run on the current buffer.
|
||||
#
|
||||
# It operates on the current, potentially unsaved buffer and does not create
|
||||
# or save any files. To revert a fix, just undo.
|
||||
|
||||
from __future__ import print_function
|
||||
import argparse
|
||||
import difflib
|
||||
import json
|
||||
import re
|
||||
import subprocess
import sys
|
||||
import vim
|
||||
|
||||
# set g:clang_include_fixer_path to the path to clang-include-fixer if it is not
|
||||
# on the path.
|
||||
# Change this to the full path if clang-include-fixer is not on the path.
|
||||
binary = "clang-include-fixer"
|
||||
if vim.eval('exists("g:clang_include_fixer_path")') == "1":
|
||||
binary = vim.eval("g:clang_include_fixer_path")
|
||||
|
||||
maximum_suggested_headers = 3
|
||||
if vim.eval('exists("g:clang_include_fixer_maximum_suggested_headers")') == "1":
|
||||
maximum_suggested_headers = max(
|
||||
1, int(vim.eval("g:clang_include_fixer_maximum_suggested_headers"))
|
||||
)
|
||||
|
||||
increment_num = 5
|
||||
if vim.eval('exists("g:clang_include_fixer_increment_num")') == "1":
|
||||
increment_num = max(1, int(vim.eval("g:clang_include_fixer_increment_num")))
|
||||
|
||||
jump_to_include = False
|
||||
if vim.eval('exists("g:clang_include_fixer_jump_to_include")') == "1":
|
||||
jump_to_include = vim.eval("g:clang_include_fixer_jump_to_include") != "0"
|
||||
|
||||
query_mode = False
|
||||
if vim.eval('exists("g:clang_include_fixer_query_mode")') == "1":
|
||||
query_mode = vim.eval("g:clang_include_fixer_query_mode") != "0"
|
||||
|
||||
|
||||
def GetUserSelection(message, headers, maximum_suggested_headers):
|
||||
eval_message = message + "\n"
|
||||
for idx, header in enumerate(headers[0:maximum_suggested_headers]):
|
||||
eval_message += "({0}). {1}\n".format(idx + 1, header)
|
||||
eval_message += "Enter (q) to quit;"
|
||||
if maximum_suggested_headers < len(headers):
|
||||
eval_message += " (m) to show {0} more candidates.".format(
|
||||
min(increment_num, len(headers) - maximum_suggested_headers)
|
||||
)
|
||||
|
||||
eval_message += "\nSelect (default 1): "
|
||||
res = vim.eval("input('{0}')".format(eval_message))
|
||||
if res == "":
|
||||
# choose the top ranked header by default
|
||||
idx = 1
|
||||
elif res == "q":
|
||||
raise Exception(" Insertion cancelled...")
|
||||
elif res == "m":
|
||||
return GetUserSelection(
|
||||
message, headers, maximum_suggested_headers + increment_num
|
||||
)
|
||||
else:
|
||||
try:
|
||||
idx = int(res)
|
||||
if idx <= 0 or idx > len(headers):
|
||||
raise Exception()
|
||||
except Exception:
|
||||
# Show a new prompt on invalid option instead of aborting so that users
|
||||
# don't need to wait for another clang-include-fixer run.
|
||||
print("Invalid option: {}".format(res), file=sys.stderr)
|
||||
return GetUserSelection(message, headers, maximum_suggested_headers)
|
||||
return headers[idx - 1]
|
||||
|
||||
|
||||
def execute(command, text):
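"""Run COMMAND, feeding TEXT to its standard input, and return (stdout, stderr)."""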
|
||||
# Avoid flashing a cmd prompt on Windows.
|
||||
startupinfo = None
|
||||
if sys.platform.startswith("win32"):
|
||||
startupinfo = subprocess.STARTUPINFO()
|
||||
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
|
||||
startupinfo.wShowWindow = subprocess.SW_HIDE
|
||||
|
||||
p = subprocess.Popen(
|
||||
command,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
stdin=subprocess.PIPE,
|
||||
startupinfo=startupinfo,
|
||||
)
|
||||
return p.communicate(input=text.encode("utf-8"))
|
||||
|
||||
|
||||
def InsertHeaderToVimBuffer(header, text):
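"""Run clang-include-fixer to insert HEADER and apply the resulting edits to the current Vim buffer."""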
|
||||
command = [
|
||||
binary,
|
||||
"-stdin",
|
||||
"-insert-header=" + json.dumps(header),
|
||||
vim.current.buffer.name,
|
||||
]
|
||||
stdout, stderr = execute(command, text)
|
||||
if stderr:
|
||||
raise Exception(stderr)
|
||||
if stdout:
|
||||
lines = stdout.splitlines()
|
||||
sequence = difflib.SequenceMatcher(None, vim.current.buffer, lines)
|
||||
line_num = None
|
||||
for op in reversed(sequence.get_opcodes()):
|
||||
if op[0] != "equal":
|
||||
vim.current.buffer[op[1] : op[2]] = lines[op[3] : op[4]]
|
||||
if op[0] == "insert":
|
||||
# line_num in vim is 1-based.
|
||||
line_num = op[1] + 1
|
||||
|
||||
if jump_to_include and line_num:
|
||||
vim.current.window.cursor = (line_num, 0)
|
||||
|
||||
|
||||
# The vim internal implementation (expand("cword"/"cWORD")) doesn't support
|
||||
# our use case very well, so we re-implement our own.
|
||||
def get_symbol_under_cursor():
|
||||
line = vim.eval('line(".")')
|
||||
# column number in vim is 1-based.
|
||||
col = int(vim.eval('col(".")')) - 1
|
||||
line_text = vim.eval("getline({0})".format(line))
|
||||
if len(line_text) == 0:
|
||||
return ""
|
||||
symbol_pos_begin = col
|
||||
p = re.compile("[a-zA-Z0-9:_]")
|
||||
while symbol_pos_begin >= 0 and p.match(line_text[symbol_pos_begin]):
|
||||
symbol_pos_begin -= 1
|
||||
|
||||
symbol_pos_end = col
|
||||
while symbol_pos_end < len(line_text) and p.match(line_text[symbol_pos_end]):
|
||||
symbol_pos_end += 1
|
||||
return line_text[symbol_pos_begin + 1 : symbol_pos_end]
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Vim integration for clang-include-fixer"
|
||||
)
|
||||
parser.add_argument("-db", default="yaml", help="clang-include-fixer input format.")
|
||||
parser.add_argument("-input", default="", help="String to initialize the database.")
|
||||
# Don't throw exception when parsing unknown arguments to make the script
|
||||
# work in neovim.
|
||||
# Neovim (at least v0.2.1) somehow mangles the sys.argv in a weird way: it
|
||||
# will pass additional arguments (e.g. "-c script_host.py") to sys.argv,
|
||||
# which makes the script fail.
|
||||
args, _ = parser.parse_known_args()
|
||||
|
||||
# Get the current text.
|
||||
buf = vim.current.buffer
|
||||
text = "\n".join(buf)
|
||||
|
||||
if query_mode:
|
||||
symbol = get_symbol_under_cursor()
|
||||
if len(symbol) == 0:
|
||||
print("Skip querying empty symbol.")
|
||||
return
|
||||
command = [
|
||||
binary,
|
||||
"-stdin",
|
||||
"-query-symbol=" + get_symbol_under_cursor(),
|
||||
"-db=" + args.db,
|
||||
"-input=" + args.input,
|
||||
vim.current.buffer.name,
|
||||
]
|
||||
else:
|
||||
# Run command to get all headers.
|
||||
command = [
|
||||
binary,
|
||||
"-stdin",
|
||||
"-output-headers",
|
||||
"-db=" + args.db,
|
||||
"-input=" + args.input,
|
||||
vim.current.buffer.name,
|
||||
]
|
||||
stdout, stderr = execute(command, text)
|
||||
if stderr:
|
||||
print(
|
||||
"Error while running clang-include-fixer: {}".format(stderr),
|
||||
file=sys.stderr,
|
||||
)
|
||||
return
|
||||
|
||||
include_fixer_context = json.loads(stdout)
|
||||
query_symbol_infos = include_fixer_context["QuerySymbolInfos"]
|
||||
if not query_symbol_infos:
|
||||
print("The file is fine, no need to add a header.")
|
||||
return
|
||||
symbol = query_symbol_infos[0]["RawIdentifier"]
|
||||
# The header_infos is already sorted by clang-include-fixer.
|
||||
header_infos = include_fixer_context["HeaderInfos"]
|
||||
# Deduplicate headers while keeping the order, so that the same header would
|
||||
# not be suggested twice.
|
||||
unique_headers = []
|
||||
seen = set()
|
||||
for header_info in header_infos:
|
||||
header = header_info["Header"]
|
||||
if header not in seen:
|
||||
seen.add(header)
|
||||
unique_headers.append(header)
|
||||
|
||||
if not unique_headers:
|
||||
print("Couldn't find a header for {0}.".format(symbol))
|
||||
return
|
||||
|
||||
try:
|
||||
selected = unique_headers[0]
|
||||
inserted_header_infos = header_infos
|
||||
if len(unique_headers) > 1:
|
||||
selected = GetUserSelection(
|
||||
"choose a header file for {0}.".format(symbol),
|
||||
unique_headers,
|
||||
maximum_suggested_headers,
|
||||
)
|
||||
inserted_header_infos = [
|
||||
header for header in header_infos if header["Header"] == selected
|
||||
]
|
||||
include_fixer_context["HeaderInfos"] = inserted_header_infos
|
||||
|
||||
InsertHeaderToVimBuffer(include_fixer_context, text)
|
||||
print("Added #include {0} for {1}.".format(selected, symbol))
|
||||
except Exception as error:
|
||||
print(error, file=sys.stderr)
|
||||
return
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@ -0,0 +1,79 @@
|
||||
;;; clang-rename.el --- Renames every occurrence of a symbol found at <offset>. -*- lexical-binding: t; -*-
|
||||
|
||||
;; Keywords: tools, c
|
||||
|
||||
;;; Commentary:
|
||||
|
||||
;; To install clang-rename.el make sure the directory of this file is in your
|
||||
;; `load-path' and add
|
||||
;;
|
||||
;; (require 'clang-rename)
|
||||
;;
|
||||
;; to your .emacs configuration.
|
||||
|
||||
;;; Code:
|
||||
|
||||
(defgroup clang-rename nil
|
||||
"Integration with clang-rename"
|
||||
:group 'c)
|
||||
|
||||
(defcustom clang-rename-binary "clang-rename"
|
||||
"Path to clang-rename executable."
|
||||
:type '(file :must-match t)
|
||||
:group 'clang-rename)
|
||||
|
||||
;;;###autoload
|
||||
(defun clang-rename (new-name)
|
||||
"Rename all instances of the symbol at point to NEW-NAME using clang-rename."
|
||||
(interactive "sEnter a new name: ")
|
||||
(save-some-buffers :all)
|
||||
;; clang-rename should not be combined with other operations when undoing.
|
||||
(undo-boundary)
|
||||
(let ((output-buffer (get-buffer-create "*clang-rename*")))
|
||||
(with-current-buffer output-buffer (erase-buffer))
|
||||
(let ((exit-code (call-process
|
||||
clang-rename-binary nil output-buffer nil
|
||||
(format "-offset=%d"
|
||||
;; clang-rename wants file (byte) offsets, not
|
||||
;; buffer (character) positions.
|
||||
(clang-rename--bufferpos-to-filepos
|
||||
;; Emacs treats one character after a symbol as
|
||||
;; part of the symbol, but clang-rename doesn’t.
|
||||
;; Use the beginning of the current symbol, if
|
||||
;; available, to resolve the inconsistency.
|
||||
(or (car (bounds-of-thing-at-point 'symbol))
|
||||
(point))
|
||||
'exact))
|
||||
(format "-new-name=%s" new-name)
|
||||
"-i" (buffer-file-name))))
|
||||
(if (and (integerp exit-code) (zerop exit-code))
|
||||
;; Success; revert current buffer so it gets the modifications.
|
||||
(progn
|
||||
(kill-buffer output-buffer)
|
||||
(revert-buffer :ignore-auto :noconfirm :preserve-modes))
|
||||
;; Failure; append exit code to output buffer and display it.
|
||||
(let ((message (clang-rename--format-message
|
||||
"clang-rename failed with %s %s"
|
||||
(if (integerp exit-code) "exit status" "signal")
|
||||
exit-code)))
|
||||
(with-current-buffer output-buffer
|
||||
(insert ?\n message ?\n))
|
||||
(message "%s" message)
|
||||
(display-buffer output-buffer))))))
|
||||
|
||||
(defalias 'clang-rename--bufferpos-to-filepos
|
||||
(if (fboundp 'bufferpos-to-filepos)
|
||||
'bufferpos-to-filepos
|
||||
;; Emacs 24 doesn’t have ‘bufferpos-to-filepos’, simulate it using
|
||||
;; ‘position-bytes’.
|
||||
(lambda (position &optional _quality _coding-system)
|
||||
(1- (position-bytes position)))))
|
||||
|
||||
;; ‘format-message’ is new in Emacs 25.1. Provide a fallback for older
|
||||
;; versions.
|
||||
(defalias 'clang-rename--format-message
|
||||
(if (fboundp 'format-message) 'format-message 'format))
|
||||
|
||||
(provide 'clang-rename)
|
||||
|
||||
;;; clang-rename.el ends here
|
||||
@ -0,0 +1,70 @@
|
||||
"""
|
||||
Minimal clang-rename integration with Vim.
|
||||
|
||||
Before installing make sure one of the following is satisfied:
|
||||
|
||||
* clang-rename is in your PATH
* `g:clang_rename_path` in ~/.vimrc points to a valid clang-rename executable
* `binary` in clang-rename.py points to a valid clang-rename executable
|
||||
|
||||
To install, simply put this into your ~/.vimrc for python2 support
|
||||
|
||||
noremap <leader>cr :pyf <path-to>/clang-rename.py<cr>
|
||||
|
||||
For python3 use the following command (note the change from :pyf to :py3f)
|
||||
|
||||
noremap <leader>cr :py3f <path-to>/clang-rename.py<cr>
|
||||
|
||||
IMPORTANT NOTE: Before running the tool, make sure you have saved the file.
|
||||
|
||||
All you have to do now is place the cursor on a variable/function/class which
|
||||
you would like to rename and press '<leader>cr'. You will be prompted for a new
|
||||
name if the cursor points to a valid symbol.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
import vim
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def main():
|
||||
binary = "clang-rename"
|
||||
if vim.eval('exists("g:clang_rename_path")') == "1":
|
||||
binary = vim.eval("g:clang_rename_path")
|
||||
|
||||
# Get arguments for clang-rename binary.
|
||||
offset = int(vim.eval('line2byte(line("."))+col(".")')) - 2
|
||||
if offset < 0:
|
||||
print(
|
||||
"Couldn't determine cursor position. Is your file empty?", file=sys.stderr
|
||||
)
|
||||
return
|
||||
filename = vim.current.buffer.name
|
||||
|
||||
new_name_request_message = "type new name:"
|
||||
new_name = vim.eval("input('{}\n')".format(new_name_request_message))
|
||||
|
||||
# Call clang-rename.
|
||||
command = [
|
||||
binary,
|
||||
filename,
|
||||
"-i",
|
||||
"-offset",
|
||||
str(offset),
|
||||
"-new-name",
|
||||
str(new_name),
|
||||
]
|
||||
# FIXME: make it possible to run the tool on unsaved file.
|
||||
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
stdout, stderr = p.communicate()
|
||||
|
||||
if stderr:
|
||||
print(stderr)
|
||||
|
||||
# Reload all buffers in Vim.
|
||||
vim.command("checktime")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@ -0,0 +1,382 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# ===- clang-tidy-diff.py - ClangTidy Diff Checker -----------*- python -*--===#
|
||||
#
|
||||
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
||||
# See https://llvm.org/LICENSE.txt for license information.
|
||||
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
||||
#
|
||||
# ===-----------------------------------------------------------------------===#
|
||||
|
||||
r"""
|
||||
ClangTidy Diff Checker
|
||||
======================
|
||||
|
||||
This script reads input from a unified diff, runs clang-tidy on all changed
|
||||
files and outputs clang-tidy warnings in changed lines only. This is useful to
|
||||
detect clang-tidy regressions in the lines touched by a specific patch.
|
||||
Example usage for git/svn users:
|
||||
|
||||
git diff -U0 HEAD^ | clang-tidy-diff.py -p1
|
||||
svn diff --diff-cmd=diff -x-U0 | \
|
||||
clang-tidy-diff.py -fix -checks=-*,modernize-use-override
|
||||
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import glob
|
||||
import json
|
||||
import multiprocessing
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import threading
|
||||
import traceback
|
||||
|
||||
try:
|
||||
import yaml
|
||||
except ImportError:
|
||||
yaml = None
|
||||
|
||||
is_py2 = sys.version[0] == "2"
|
||||
|
||||
if is_py2:
|
||||
import Queue as queue
|
||||
else:
|
||||
import queue as queue
|
||||
|
||||
|
||||
def run_tidy(task_queue, lock, timeout, failed_files):
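"""Pull clang-tidy commands off TASK_QUEUE, run them, and record failing commands in FAILED_FILES."""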
|
||||
watchdog = None
|
||||
while True:
|
||||
command = task_queue.get()
|
||||
try:
|
||||
proc = subprocess.Popen(
|
||||
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
||||
)
|
||||
|
||||
if timeout is not None:
|
||||
watchdog = threading.Timer(timeout, proc.kill)
|
||||
watchdog.start()
|
||||
|
||||
stdout, stderr = proc.communicate()
|
||||
if proc.returncode != 0:
|
||||
if proc.returncode < 0:
|
||||
msg = "Terminated by signal %d : %s\n" % (
|
||||
-proc.returncode,
|
||||
" ".join(command),
|
||||
)
|
||||
stderr += msg.encode("utf-8")
|
||||
failed_files.append(command)
|
||||
|
||||
with lock:
|
||||
sys.stdout.write(stdout.decode("utf-8") + "\n")
|
||||
sys.stdout.flush()
|
||||
if stderr:
|
||||
sys.stderr.write(stderr.decode("utf-8") + "\n")
|
||||
sys.stderr.flush()
|
||||
except Exception as e:
|
||||
with lock:
|
||||
sys.stderr.write("Failed: " + str(e) + ": ".join(command) + "\n")
|
||||
finally:
|
||||
with lock:
|
||||
if not (timeout is None or watchdog is None):
|
||||
if not watchdog.is_alive():
|
||||
sys.stderr.write(
|
||||
"Terminated by timeout: " + " ".join(command) + "\n"
|
||||
)
|
||||
watchdog.cancel()
|
||||
task_queue.task_done()
|
||||
|
||||
|
||||
def start_workers(max_tasks, tidy_caller, arguments):
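"""Start MAX_TASKS daemon threads that run TIDY_CALLER with ARGUMENTS."""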
|
||||
for _ in range(max_tasks):
|
||||
t = threading.Thread(target=tidy_caller, args=arguments)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
|
||||
def merge_replacement_files(tmpdir, mergefile):
|
||||
"""Merge all replacement files in a directory into a single file"""
|
||||
# The fixes suggested by clang-tidy >= 4.0.0 are given under
|
||||
# the top level key 'Diagnostics' in the output yaml files
|
||||
mergekey = "Diagnostics"
|
||||
merged = []
|
||||
for replacefile in glob.iglob(os.path.join(tmpdir, "*.yaml")):
|
||||
content = yaml.safe_load(open(replacefile, "r"))
|
||||
if not content:
|
||||
continue # Skip empty files.
|
||||
merged.extend(content.get(mergekey, []))
|
||||
|
||||
if merged:
|
||||
# MainSourceFile: The key is required by the definition inside
|
||||
# include/clang/Tooling/ReplacementsYaml.h, but the value
|
||||
# is actually never used inside clang-apply-replacements,
|
||||
# so we set it to '' here.
|
||||
output = {"MainSourceFile": "", mergekey: merged}
|
||||
with open(mergefile, "w") as out:
|
||||
yaml.safe_dump(output, out)
|
||||
else:
|
||||
# Empty the file:
|
||||
open(mergefile, "w").close()
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Run clang-tidy against changed files, and "
|
||||
"output diagnostics only for modified "
|
||||
"lines."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-clang-tidy-binary",
|
||||
metavar="PATH",
|
||||
default="clang-tidy",
|
||||
help="path to clang-tidy binary",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p",
|
||||
metavar="NUM",
|
||||
default=0,
|
||||
help="strip the smallest prefix containing P slashes",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-regex",
|
||||
metavar="PATTERN",
|
||||
default=None,
|
||||
help="custom pattern selecting file paths to check "
|
||||
"(case sensitive, overrides -iregex)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-iregex",
|
||||
metavar="PATTERN",
|
||||
default=r".*\.(cpp|cc|c\+\+|cxx|c|cl|h|hpp|m|mm|inc)",
|
||||
help="custom pattern selecting file paths to check "
|
||||
"(case insensitive, overridden by -regex)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-j",
|
||||
type=int,
|
||||
default=1,
|
||||
help="number of tidy instances to be run in parallel.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-timeout", type=int, default=None, help="timeout per each file in seconds."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-fix", action="store_true", default=False, help="apply suggested fixes"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-checks",
|
||||
help="checks filter, when not specified, use clang-tidy " "default",
|
||||
default="",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-config-file",
|
||||
dest="config_file",
|
||||
help="Specify the path of .clang-tidy or custom config file",
|
||||
default="",
|
||||
)
|
||||
parser.add_argument("-use-color", action="store_true", help="Use colors in output")
|
||||
parser.add_argument(
|
||||
"-path", dest="build_path", help="Path used to read a compile command database."
|
||||
)
|
||||
if yaml:
|
||||
parser.add_argument(
|
||||
"-export-fixes",
|
||||
metavar="FILE_OR_DIRECTORY",
|
||||
dest="export_fixes",
|
||||
help="A directory or a yaml file to store suggested fixes in, "
|
||||
"which can be applied with clang-apply-replacements. If the "
|
||||
"parameter is a directory, the fixes of each compilation unit are "
|
||||
"stored in individual yaml files in the directory.",
|
||||
)
|
||||
else:
|
||||
parser.add_argument(
|
||||
"-export-fixes",
|
||||
metavar="DIRECTORY",
|
||||
dest="export_fixes",
|
||||
help="A directory to store suggested fixes in, which can be applied "
|
||||
"with clang-apply-replacements. The fixes of each compilation unit are "
|
||||
"stored in individual yaml files in the directory.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-extra-arg",
|
||||
dest="extra_arg",
|
||||
action="append",
|
||||
default=[],
|
||||
help="Additional argument to append to the compiler " "command line.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-extra-arg-before",
|
||||
dest="extra_arg_before",
|
||||
action="append",
|
||||
default=[],
|
||||
help="Additional argument to prepend to the compiler " "command line.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-quiet",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Run clang-tidy in quiet mode",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-load",
|
||||
dest="plugins",
|
||||
action="append",
|
||||
default=[],
|
||||
help="Load the specified plugin in clang-tidy.",
|
||||
)
|
||||
|
||||
clang_tidy_args = []
|
||||
argv = sys.argv[1:]
|
||||
if "--" in argv:
|
||||
clang_tidy_args.extend(argv[argv.index("--") :])
|
||||
argv = argv[: argv.index("--")]
|
||||
|
||||
args = parser.parse_args(argv)
|
||||
|
||||
# Extract changed lines for each file.
|
||||
filename = None
|
||||
lines_by_file = {}
|
||||
for line in sys.stdin:
|
||||
match = re.search('^\+\+\+\ "?(.*?/){%s}([^ \t\n"]*)' % args.p, line)
|
||||
if match:
|
||||
filename = match.group(2)
|
||||
if filename is None:
|
||||
continue
|
||||
|
||||
if args.regex is not None:
|
||||
if not re.match("^%s$" % args.regex, filename):
|
||||
continue
|
||||
else:
|
||||
if not re.match("^%s$" % args.iregex, filename, re.IGNORECASE):
|
||||
continue
|
||||
|
||||
match = re.search("^@@.*\+(\d+)(,(\d+))?", line)
|
||||
if match:
|
||||
start_line = int(match.group(1))
|
||||
line_count = 1
|
||||
if match.group(3):
|
||||
line_count = int(match.group(3))
|
||||
if line_count == 0:
|
||||
continue
|
||||
end_line = start_line + line_count - 1
|
||||
lines_by_file.setdefault(filename, []).append([start_line, end_line])
|
||||
|
||||
if not any(lines_by_file):
|
||||
print("No relevant changes found.")
|
||||
sys.exit(0)
|
||||
|
||||
max_task_count = args.j
|
||||
if max_task_count == 0:
|
||||
max_task_count = multiprocessing.cpu_count()
|
||||
max_task_count = min(len(lines_by_file), max_task_count)
|
||||
|
||||
combine_fixes = False
|
||||
export_fixes_dir = None
|
||||
delete_fixes_dir = False
|
||||
if args.export_fixes is not None:
|
||||
# if a directory is given, create it if it does not exist
|
||||
if args.export_fixes.endswith(os.path.sep) and not os.path.isdir(
|
||||
args.export_fixes
|
||||
):
|
||||
os.makedirs(args.export_fixes)
|
||||
|
||||
if not os.path.isdir(args.export_fixes):
|
||||
if not yaml:
|
||||
raise RuntimeError(
|
||||
"Cannot combine fixes in one yaml file. Either install PyYAML or specify an output directory."
|
||||
)
|
||||
|
||||
combine_fixes = True
|
||||
|
||||
if os.path.isdir(args.export_fixes):
|
||||
export_fixes_dir = args.export_fixes
|
||||
|
||||
if combine_fixes:
|
||||
export_fixes_dir = tempfile.mkdtemp()
|
||||
delete_fixes_dir = True
|
||||
|
||||
# Tasks for clang-tidy.
|
||||
task_queue = queue.Queue(max_task_count)
|
||||
# A lock for console output.
|
||||
lock = threading.Lock()
|
||||
|
||||
# List of files with a non-zero return code.
|
||||
failed_files = []
|
||||
|
||||
# Run a pool of clang-tidy workers.
|
||||
start_workers(
|
||||
max_task_count, run_tidy, (task_queue, lock, args.timeout, failed_files)
|
||||
)
|
||||
|
||||
# Form the common args list.
|
||||
common_clang_tidy_args = []
|
||||
if args.fix:
|
||||
common_clang_tidy_args.append("-fix")
|
||||
if args.checks != "":
|
||||
common_clang_tidy_args.append("-checks=" + args.checks)
|
||||
if args.config_file != "":
|
||||
common_clang_tidy_args.append("-config-file=" + args.config_file)
|
||||
if args.quiet:
|
||||
common_clang_tidy_args.append("-quiet")
|
||||
if args.build_path is not None:
|
||||
common_clang_tidy_args.append("-p=%s" % args.build_path)
|
||||
if args.use_color:
|
||||
common_clang_tidy_args.append("--use-color")
|
||||
for arg in args.extra_arg:
|
||||
common_clang_tidy_args.append("-extra-arg=%s" % arg)
|
||||
for arg in args.extra_arg_before:
|
||||
common_clang_tidy_args.append("-extra-arg-before=%s" % arg)
|
||||
for plugin in args.plugins:
|
||||
common_clang_tidy_args.append("-load=%s" % plugin)
|
||||
|
||||
for name in lines_by_file:
|
||||
line_filter_json = json.dumps(
|
||||
[{"name": name, "lines": lines_by_file[name]}], separators=(",", ":")
|
||||
)
|
||||
|
||||
# Run clang-tidy on files containing changes.
|
||||
command = [args.clang_tidy_binary]
|
||||
command.append("-line-filter=" + line_filter_json)
|
||||
if args.export_fixes is not None:
|
||||
# Get a temporary file. We immediately close the handle so clang-tidy can
|
||||
# overwrite it.
|
||||
(handle, tmp_name) = tempfile.mkstemp(suffix=".yaml", dir=export_fixes_dir)
|
||||
os.close(handle)
|
||||
command.append("-export-fixes=" + tmp_name)
|
||||
command.extend(common_clang_tidy_args)
|
||||
command.append(name)
|
||||
command.extend(clang_tidy_args)
|
||||
|
||||
task_queue.put(command)
|
||||
|
||||
# Application return code
|
||||
return_code = 0
|
||||
|
||||
# Wait for all threads to be done.
|
||||
task_queue.join()
|
||||
if failed_files:
|
||||
return_code = 1
|
||||
|
||||
if combine_fixes:
|
||||
print("Writing fixes to " + args.export_fixes + " ...")
|
||||
try:
|
||||
merge_replacement_files(export_fixes_dir, args.export_fixes)
|
||||
except:
|
||||
sys.stderr.write("Error exporting fixes.\n")
|
||||
traceback.print_exc()
|
||||
return_code = 1
|
||||
|
||||
if delete_fixes_dir:
|
||||
shutil.rmtree(export_fixes_dir)
|
||||
sys.exit(return_code)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@ -0,0 +1,87 @@
|
||||
// Append using posix-style a file name or directory to Base
|
||||
function append(Base, New) {
|
||||
if (!New)
|
||||
return Base;
|
||||
if (Base)
|
||||
Base += "/";
|
||||
Base += New;
|
||||
return Base;
|
||||
}
|
||||
|
||||
// Get relative path to access FilePath from CurrentDirectory
|
||||
function computeRelativePath(FilePath, CurrentDirectory) {
|
||||
var Path = FilePath;
|
||||
while (Path) {
|
||||
if (CurrentDirectory == Path)
|
||||
return FilePath.substring(Path.length + 1);
|
||||
Path = Path.substring(0, Path.lastIndexOf("/"));
|
||||
}
|
||||
|
||||
var Dir = CurrentDirectory;
|
||||
var Result = "";
|
||||
while (Dir) {
|
||||
if (Dir == FilePath)
|
||||
break;
|
||||
Dir = Dir.substring(0, Dir.lastIndexOf("/"));
|
||||
Result = append(Result, "..")
|
||||
}
|
||||
Result = append(Result, FilePath.substring(Dir.length))
|
||||
return Result;
|
||||
}
|
||||
|
||||
function genLink(Ref, CurrentDirectory) {
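// Build an anchor element linking to Ref's page, using a path relative to CurrentDirectory.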
|
||||
var Path = computeRelativePath(Ref.Path, CurrentDirectory);
|
||||
if (Ref.RefType == "namespace")
|
||||
Path = append(Path, "index.html");
|
||||
else
|
||||
Path = append(Path, Ref.Name + ".html")
|
||||
|
||||
ANode = document.createElement("a");
|
||||
ANode.setAttribute("href", Path);
|
||||
var TextNode = document.createTextNode(Ref.Name);
|
||||
ANode.appendChild(TextNode);
|
||||
return ANode;
|
||||
}
|
||||
|
||||
function genHTMLOfIndex(Index, CurrentDirectory, IsOutermostList) {
|
||||
// Out will store the HTML elements that Index requires to be generated
|
||||
var Out = [];
|
||||
if (Index.Name) {
|
||||
var SpanNode = document.createElement("span");
|
||||
var TextNode = document.createTextNode(Index.Name);
|
||||
SpanNode.appendChild(genLink(Index, CurrentDirectory));
|
||||
Out.push(SpanNode);
|
||||
}
|
||||
if (Index.Children.length == 0)
|
||||
return Out;
|
||||
// Only the outermost list should use ol, the others should use ul
|
||||
var ListNodeName = IsOutermostList ? "ol" : "ul";
|
||||
var ListNode = document.createElement(ListNodeName);
|
||||
for (Child of Index.Children) {
|
||||
var LiNode = document.createElement("li");
|
||||
ChildNodes = genHTMLOfIndex(Child, CurrentDirectory, false);
|
||||
for (Node of ChildNodes)
|
||||
LiNode.appendChild(Node);
|
||||
ListNode.appendChild(LiNode);
|
||||
}
|
||||
Out.push(ListNode);
|
||||
return Out;
|
||||
}
|
||||
|
||||
function createIndex(Index) {
|
||||
// Get the DOM element where the index will be created
|
||||
var IndexDiv = document.getElementById("sidebar-left");
|
||||
// Get the relative path of this file
|
||||
CurrentDirectory = IndexDiv.getAttribute("path");
|
||||
var IndexNodes = genHTMLOfIndex(Index, CurrentDirectory, true);
|
||||
for (Node of IndexNodes)
|
||||
IndexDiv.appendChild(Node);
|
||||
}
|
||||
|
||||
// Runs after DOM loads
|
||||
document.addEventListener("DOMContentLoaded", function() {
|
||||
// JsonIndex is a variable from another file that contains the index
|
||||
// in JSON format
|
||||
var Index = JSON.parse(JsonIndex);
|
||||
createIndex(Index);
|
||||
});
|
||||
@ -0,0 +1,133 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# =- run-find-all-symbols.py - Parallel find-all-symbols runner -*- python -*-=#
|
||||
#
|
||||
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
||||
# See https://llvm.org/LICENSE.txt for license information.
|
||||
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
||||
#
|
||||
# ===------------------------------------------------------------------------===#
|
||||
|
||||
"""
|
||||
Parallel find-all-symbols runner
|
||||
================================
|
||||
|
||||
Runs find-all-symbols over all files in a compilation database.
|
||||
|
||||
Example invocations.
|
||||
- Run find-all-symbols on all files in the current working directory.
|
||||
run-find-all-symbols.py <source-file>
|
||||
|
||||
Compilation database setup:
|
||||
http://clang.llvm.org/docs/HowToSetupToolingForLLVM.html
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import multiprocessing
|
||||
import os
|
||||
import Queue
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import threading
|
||||
|
||||
|
||||
def find_compilation_database(path):
|
||||
"""Adjusts the directory until a compilation database is found."""
|
||||
result = "./"
|
||||
while not os.path.isfile(os.path.join(result, path)):
|
||||
if os.path.realpath(result) == "/":
|
||||
print("Error: could not find compilation database.")
|
||||
sys.exit(1)
|
||||
result += "../"
|
||||
return os.path.realpath(result)
|
||||
|
||||
|
||||
def MergeSymbols(directory, args):
|
||||
"""Merge all symbol files (yaml) in a given directory into a single file."""
|
||||
invocation = [args.binary, "-merge-dir=" + directory, args.saving_path]
|
||||
subprocess.call(invocation)
|
||||
print("Merge is finished. Saving results in " + args.saving_path)
|
||||
|
||||
|
||||
def run_find_all_symbols(args, tmpdir, build_path, queue):
|
||||
"""Takes filenames out of queue and runs find-all-symbols on them."""
|
||||
while True:
|
||||
name = queue.get()
|
||||
invocation = [args.binary, name, "-output-dir=" + tmpdir, "-p=" + build_path]
|
||||
sys.stdout.write(" ".join(invocation) + "\n")
|
||||
subprocess.call(invocation)
|
||||
queue.task_done()
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Runs find-all-symbols over all" "files in a compilation database."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-binary",
|
||||
metavar="PATH",
|
||||
default="./bin/find-all-symbols",
|
||||
help="path to find-all-symbols binary",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-j", type=int, default=0, help="number of instances to be run in parallel."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p", dest="build_path", help="path used to read a compilation database."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-saving-path", default="./find_all_symbols_db.yaml", help="result saving path"
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
db_path = "compile_commands.json"
|
||||
|
||||
if args.build_path is not None:
|
||||
build_path = args.build_path
|
||||
else:
|
||||
build_path = find_compilation_database(db_path)
|
||||
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
|
||||
# Load the database and extract all files.
|
||||
database = json.load(open(os.path.join(build_path, db_path)))
|
||||
files = [entry["file"] for entry in database]
|
||||
|
||||
# Filter out .rc files on Windows. CMake includes them for some reason.
|
||||
files = [f for f in files if not f.endswith(".rc")]
|
||||
|
||||
max_task = args.j
|
||||
if max_task == 0:
|
||||
max_task = multiprocessing.cpu_count()
|
||||
|
||||
try:
|
||||
# Spin up a bunch of find-all-symbols-launching threads.
|
||||
queue = Queue.Queue(max_task)
|
||||
for _ in range(max_task):
|
||||
t = threading.Thread(
|
||||
target=run_find_all_symbols, args=(args, tmpdir, build_path, queue)
|
||||
)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
# Fill the queue with files.
|
||||
for name in files:
|
||||
queue.put(name)
|
||||
|
||||
# Wait for all threads to be done.
|
||||
queue.join()
|
||||
|
||||
MergeSymbols(tmpdir, args)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
# This is a sad hack. Unfortunately subprocess goes
|
||||
# bonkers with ctrl-c and we start forking merrily.
|
||||
print("\nCtrl-C detected, goodbye.")
|
||||
os.kill(0, 9)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@ -0,0 +1,350 @@
|
||||
.\" Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
||||
.\" See https://llvm.org/LICENSE.txt for license information.
|
||||
.\" SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
||||
.\" $Id$
|
||||
.Dd Sep 21, 2023
|
||||
.Dt SCAN-BUILD 1
|
||||
.Os "clang" "18"
|
||||
.Sh NAME
|
||||
.Nm scan-build
|
||||
.Nd Clang static analyzer
|
||||
.Sh SYNOPSIS
|
||||
.Nm
|
||||
.Op Fl ohkvV
|
||||
.Op Fl analyze-headers
|
||||
.Op Fl enable-checker Op Ar checker_name
|
||||
.Op Fl disable-checker Op Ar checker_name
|
||||
.Op Fl Fl help
|
||||
.Op Fl Fl help-checkers
|
||||
.Op Fl Fl html-title Op Ar =title
|
||||
.Op Fl Fl keep-going
|
||||
.Op Fl plist
|
||||
.Op Fl plist-html
|
||||
.Op Fl Fl status-bugs
|
||||
.Op Fl Fl use-c++ Op Ar =compiler_path
|
||||
.Op Fl Fl use-cc Op Ar =compiler_path
|
||||
.Op Fl Fl view
|
||||
.Op Fl constraints Op Ar model
|
||||
.Op Fl maxloop Ar N
|
||||
.Op Fl no-failure-reports
|
||||
.Op Fl stats
|
||||
.Op Fl store Op Ar model
|
||||
.Ar build_command
|
||||
.Op build_options
|
||||
.\"
|
||||
.\" Sh DESCRIPTION
|
||||
.Sh DESCRIPTION
|
||||
.Nm
|
||||
is a Perl script that invokes the Clang static analyzer. Options used by
|
||||
.Nm
|
||||
or by the analyzer appear first, followed by the
|
||||
.Ar build_command
|
||||
and any
|
||||
.Ar build_options
|
||||
normally used to build the target system.
|
||||
.Pp
|
||||
The static analyzer employs a long list of checking algorithms, see
|
||||
.Sx CHECKERS .
|
||||
Output can be written in standard
|
||||
.Li .plist
|
||||
and/or HTML format.
|
||||
.Pp
|
||||
The following options are supported:
|
||||
.Bl -tag -width indent
|
||||
.It Fl analyze-headers
|
||||
Also analyze functions in #included files.
|
||||
.It Fl enable-checker Ar checker_name , Fl disable-checker Ar checker_name
|
||||
Enable/disable
|
||||
.Ar checker_name .
|
||||
See
|
||||
.Sx CHECKERS .
|
||||
.It Fl h , Fl Fl help
|
||||
Display this message.
|
||||
.It Fl Fl help-checkers
|
||||
List default checkers, see
|
||||
.Sx CHECKERS .
|
||||
.It Fl Fl html-title Ns Op = Ns Ar title
|
||||
Specify the title used on generated HTML pages.
|
||||
A default title is generated if
|
||||
.Ar title
|
||||
is not specified.
|
||||
.It Fl k , Fl Fl keep-going
|
||||
Add a
|
||||
.Dq keep on going
|
||||
option to
|
||||
.Ar build_command .
|
||||
Currently supports make and xcodebuild. This is a convenience option;
|
||||
one can specify this behavior directly using build options.
|
||||
.It Fl o
|
||||
Target directory for HTML report files. Subdirectories will be
|
||||
created as needed to represent separate invocations
|
||||
of the analyzer. If this option is not specified, a directory is
|
||||
created in /tmp (TMPDIR on Mac OS X) to store the reports.
|
||||
.It Fl plist
|
||||
Output the results as a set of
|
||||
.Li .plist
|
||||
files. (By default the output of
|
||||
.Nm
|
||||
is a set of HTML files.)
|
||||
.It Fl plist-html
|
||||
Output the results as a set of HTML and .plist files
|
||||
.It Fl Fl status-bugs
|
||||
Set exit status to 1 if it found potential bugs and 0 otherwise. By
|
||||
default the exit status of
|
||||
.Nm
|
||||
is that returned by
|
||||
.Ar build_command .
|
||||
.It Fl Fl use-c++ Ns Op = Ns Ar compiler_path
|
||||
Guess the default compiler for your C++ and Objective-C++ code. Use this
|
||||
option to specify an alternate compiler.
|
||||
.It Fl Fl use-cc Ns Op = Ns Ar compiler_path
|
||||
Guess the default compiler for your C and Objective-C code. Use this
|
||||
option to specify an alternate compiler.
|
||||
.It Fl v
|
||||
Verbose output from
|
||||
.Nm
|
||||
and the analyzer. A second and
|
||||
third
|
||||
.Ar v
|
||||
increases verbosity.
|
||||
.It Fl V , Fl Fl view
|
||||
View analysis results in a web browser when the build completes.
|
||||
.It Fl constraints Op Ar model
|
||||
Specify the constraint engine used by the analyzer. By default the
|
||||
.Ql range
|
||||
model is used. Specifying
|
||||
.Ql basic
|
||||
uses a simpler, less powerful constraint model used by checker-0.160
|
||||
and earlier.
|
||||
.It Fl maxloop Ar N
|
||||
Specify the number of times a block can be visited before giving
|
||||
up. Default is 4. Increase for more comprehensive coverage at a
|
||||
cost of speed.
|
||||
.It Fl no-failure-reports
|
||||
Do not create a
|
||||
.Ql failures
|
||||
subdirectory that includes analyzer crash reports and preprocessed
|
||||
source files.
|
||||
.It Fl stats
|
||||
Generates visitation statistics for the project being analyzed.
|
||||
.It Fl store Op Ar model
|
||||
Specify the store model used by the analyzer. By default, the
|
||||
.Ql region
|
||||
store model is used.
|
||||
.Ql region
|
||||
specifies a field-
|
||||
sensitive store model. Users can also specify
|
||||
.Ql basic
|
||||
which is far less precise but can more quickly analyze code.
|
||||
.Ql basic
|
||||
was the default store model for checker-0.221 and earlier.
|
||||
.\"
|
||||
.El
|
||||
.Sh EXIT STATUS
|
||||
.Nm
|
||||
returns the value returned by
|
||||
.Ar build_command
|
||||
unless
|
||||
.Fl Fl status-bugs
|
||||
or
|
||||
.Fl Fl keep-going
|
||||
is used.
|
||||
.\"
|
||||
.\" Other sections not yet used ...
|
||||
.\" .Sh ENVIRONMENT
|
||||
.\" .Sh FILES
|
||||
.\" .Sh DIAGNOSTICS
|
||||
.\" .Sh COMPATIBILITY
|
||||
.\" .Sh HISTORY
|
||||
.\" .Sh BUGS
|
||||
.\"
|
||||
.Sh CHECKERS
|
||||
The checkers listed below may be enabled/disabled using the
|
||||
.Fl enable-checker
|
||||
and
|
||||
.Fl disable-checker
|
||||
options.
|
||||
A default group of checkers is run unless explicitly disabled.
|
||||
Exactly which checkers constitute the default group is a function
|
||||
of the operating system in use; they are listed with
|
||||
.Fl Fl help-checkers .
|
||||
.Bl -tag -width indent.
|
||||
.It core.AdjustedReturnValue
|
||||
Check to see if the return value of a function call is different than
|
||||
the caller expects (e.g., from calls through function pointers).
|
||||
.It core.AttributeNonNull
|
||||
Check for null pointers passed as arguments to a function whose arguments are marked with the
|
||||
.Ql nonnull
|
||||
attribute.
|
||||
.It core.CallAndMessage
|
||||
Check for logical errors for function calls and Objective-C message expressions (e.g., uninitialized arguments, null function pointers).
|
||||
.It core.DivideZero
|
||||
Check for division by zero.
|
||||
.It core.NullDereference
|
||||
Check for dereferences of null pointers.
|
||||
.It core.StackAddressEscape
|
||||
Check that addresses to stack memory do not escape the function.
|
||||
.It core.UndefinedBinaryOperatorResult
|
||||
Check for undefined results of binary operators.
|
||||
.It core.VLASize
|
||||
Check for declarations of VLA of undefined or zero size.
|
||||
.It core.builtin.BuiltinFunctions
|
||||
Evaluate compiler builtin functions, e.g.
|
||||
.Fn alloca .
|
||||
.It core.builtin.NoReturnFunctions
|
||||
Evaluate
|
||||
.Ql panic
|
||||
functions that are known to not return to the caller.
|
||||
.It core.uninitialized.ArraySubscript
|
||||
Check for uninitialized values used as array subscripts.
|
||||
.It core.uninitialized.Assign
|
||||
Check for assigning uninitialized values.
|
||||
.It core.uninitialized.Branch
|
||||
Check for uninitialized values used as branch conditions.
|
||||
.It core.uninitialized.CapturedBlockVariable
|
||||
Check for blocks that capture uninitialized values.
|
||||
.It core.uninitialized.UndefReturn
|
||||
Check for uninitialized values being returned to the caller.
|
||||
.It deadcode.DeadStores
|
||||
Check for values stored to variables that are never read afterwards.
|
||||
.It debug.DumpCFG
|
||||
Display Control-Flow Graphs.
|
||||
.It debug.DumpCallGraph
|
||||
Display Call Graph.
|
||||
.It debug.DumpDominators
|
||||
Print the dominance tree for a given Control-Flow Graph.
|
||||
.It debug.DumpLiveVars
|
||||
Print results of live variable analysis.
|
||||
.It debug.Stats
|
||||
Emit warnings with analyzer statistics.
|
||||
.It debug.TaintTest
|
||||
Mark tainted symbols as such.
|
||||
.It debug.ViewCFG
|
||||
View Control-Flow Graphs using
|
||||
.Ic GraphViz .
|
||||
.It debug.ViewCallGraph
|
||||
View Call Graph using
|
||||
.Ic GraphViz .
|
||||
.It llvm.Conventions
|
||||
Check code for LLVM codebase conventions.
|
||||
.It osx.API
|
||||
Check for proper uses of various Mac OS X APIs.
|
||||
.It osx.AtomicCAS
|
||||
Evaluate calls to
|
||||
.Vt OSAtomic
|
||||
functions.
|
||||
.It osx.SecKeychainAPI
|
||||
Check for proper uses of Secure Keychain APIs.
|
||||
.It osx.cocoa.AtSync
|
||||
Check for null pointers used as mutexes for @synchronized.
|
||||
.It osx.cocoa.ClassRelease
|
||||
Check for sending
|
||||
.Ql retain ,
|
||||
.Ql release,
|
||||
or
|
||||
.Ql autorelease
|
||||
directly to a Class.
|
||||
.It osx.cocoa.IncompatibleMethodTypes
|
||||
Warn about Objective-C method signatures with type incompatibilities.
|
||||
.It osx.cocoa.NSAutoreleasePool
|
||||
Warn for suboptimal uses of
|
||||
.Vt NSAutoreleasePool
|
||||
in Objective-C GC mode.
|
||||
.It osx.cocoa.NSError
|
||||
Check usage of NSError** parameters.
|
||||
.It osx.cocoa.NilArg
|
||||
Check for prohibited nil arguments to Objective-C method calls.
|
||||
.It osx.cocoa.RetainCount
|
||||
Check for leaks and improper reference count management.
|
||||
.It osx.cocoa.SelfInit
|
||||
Check that
|
||||
.Ql self
|
||||
is properly initialized inside an initializer method.
|
||||
.It osx.cocoa.UnusedIvars
|
||||
Warn about private ivars that are never used.
|
||||
.It osx.cocoa.VariadicMethodTypes
|
||||
Check for passing non-Objective-C types to variadic methods that expect only Objective-C types.
|
||||
.It osx.coreFoundation.CFError
|
||||
Check usage of CFErrorRef* parameters.
|
||||
.It osx.coreFoundation.CFNumber
|
||||
Check for proper uses of
|
||||
.Fn CFNumberCreate .
|
||||
.It osx.coreFoundation.CFRetainRelease
|
||||
Check for null arguments to
|
||||
.Fn CFRetain ,
|
||||
.Fn CFRelease ,
|
||||
and
|
||||
.Fn CFMakeCollectable .
|
||||
.It osx.coreFoundation.containers.OutOfBounds
|
||||
Checks for index out-of-bounds when using the
|
||||
.Vt CFArray
|
||||
API.
|
||||
.It osx.coreFoundation.containers.PointerSizedValues
|
||||
Warns if
|
||||
.Vt CFArray ,
|
||||
.Vt CFDictionary ,
|
||||
or
|
||||
.Vt CFSet
|
||||
are created with non-pointer-size values.
|
||||
.It security.FloatLoopCounter
|
||||
Warn on using a floating point value as a loop counter (CERT: FLP30-C, FLP30-CPP).
|
||||
.It security.insecureAPI.UncheckedReturn
|
||||
Warn on uses of functions whose return values must always be checked.
|
||||
.It security.insecureAPI.getpw
|
||||
Warn on uses of
|
||||
.Fn getpw .
|
||||
.It security.insecureAPI.gets
|
||||
Warn on uses of
|
||||
.Fn gets .
|
||||
.It security.insecureAPI.mkstemp
|
||||
Warn when
|
||||
.Fn mkstemp
|
||||
is passed fewer than 6 X's in the format string.
|
||||
.It security.insecureAPI.mktemp
|
||||
Warn on uses of
|
||||
.Fn mktemp .
|
||||
.It security.insecureAPI.rand
|
||||
Warn on uses of
|
||||
.Fn rand ,
|
||||
.Fn random ,
|
||||
and related functions.
|
||||
.It security.insecureAPI.strcpy
|
||||
Warn on uses of
|
||||
.Fn strcpy
|
||||
and
|
||||
.Fn strcat .
|
||||
.It security.insecureAPI.vfork
|
||||
Warn on uses of
|
||||
.Fn vfork .
|
||||
.It unix.API
|
||||
Check calls to various UNIX/Posix functions.
|
||||
.It unix.Malloc
|
||||
Check for memory leaks, double free, and use-after-free.
|
||||
.It unix.cstring.BadSizeArg
|
||||
Check the size argument passed into C string functions for common
|
||||
erroneous patterns.
|
||||
.It unix.cstring.NullArg
|
||||
Check for null pointers being passed as arguments to C string functions.
|
||||
.El
|
||||
.\"
|
||||
.Sh EXAMPLE
|
||||
.Ic scan-build -o /tmp/myhtmldir make -j4
|
||||
.Pp
|
||||
The above example causes analysis reports to be deposited into
|
||||
a subdirectory of
|
||||
.Pa /tmp/myhtmldir
|
||||
and runs
|
||||
.Ic make
|
||||
with the
|
||||
.Fl j4
|
||||
option.
|
||||
A different subdirectory is created each time
|
||||
.Nm
|
||||
analyzes a project.
|
||||
The analyzer should support most parallel builds, but not distributed builds.
|
||||
.Sh AUTHORS
|
||||
.Nm
|
||||
was written by
|
||||
.An "Ted Kremenek" .
|
||||
Documentation contributed by
|
||||
.An "James K. Lowden" Aq jklowden@schemamania.org .
|
||||
@ -0,0 +1,81 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
desc = """Generate the difference of two YAML files into a new YAML file (works on
|
||||
pair of directories too). A new attribute 'Added' is set to True or False
|
||||
depending on whether the entry is added or removed from the first input to the
|
||||
next.
|
||||
|
||||
The tool requires PyYAML."""
|
||||
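# Example invocation (paths are hypothetical; this assumes the script is saved
# as opt-diff.py next to optrecord.py):
#   python3 opt-diff.py ./remarks_old ./remarks_new -o diff{}.opt.yaml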
|
||||
import yaml
|
||||
|
||||
# Try to use the C parser.
|
||||
try:
|
||||
from yaml import CLoader as Loader
|
||||
except ImportError:
|
||||
from yaml import Loader
|
||||
|
||||
import optrecord
|
||||
import argparse
|
||||
from collections import defaultdict
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description=desc)
|
||||
parser.add_argument(
|
||||
"yaml_dir_or_file_1",
|
||||
help="An optimization record file or a directory searched for optimization "
|
||||
"record files that are used as the old version for the comparison",
|
||||
)
|
||||
parser.add_argument(
|
||||
"yaml_dir_or_file_2",
|
||||
help="An optimization record file or a directory searched for optimization "
|
||||
"record files that are used as the new version for the comparison",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--jobs",
|
||||
"-j",
|
||||
default=None,
|
||||
type=int,
|
||||
help="Max job count (defaults to %(default)s, the current CPU count)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--max-size",
|
||||
"-m",
|
||||
default=100000,
|
||||
type=int,
|
||||
help="Maximum number of remarks stored in an output file",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-progress-indicator",
|
||||
"-n",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Do not display any indicator of how many YAML files were read.",
|
||||
)
|
||||
parser.add_argument("--output", "-o", default="diff{}.opt.yaml")
|
||||
args = parser.parse_args()
|
||||
|
||||
files1 = optrecord.find_opt_files(args.yaml_dir_or_file_1)
|
||||
files2 = optrecord.find_opt_files(args.yaml_dir_or_file_2)
|
||||
|
||||
print_progress = not args.no_progress_indicator
|
||||
all_remarks1, _, _ = optrecord.gather_results(files1, args.jobs, print_progress)
|
||||
all_remarks2, _, _ = optrecord.gather_results(files2, args.jobs, print_progress)
|
||||
|
||||
added = set(all_remarks2.values()) - set(all_remarks1.values())
|
||||
removed = set(all_remarks1.values()) - set(all_remarks2.values())
|
||||
|
||||
for r in added:
|
||||
r.Added = True
|
||||
for r in removed:
|
||||
r.Added = False
|
||||
|
||||
result = list(added | removed)
|
||||
for r in result:
|
||||
r.recover_yaml_structure()
|
||||
|
||||
for i in range(0, len(result), args.max_size):
|
||||
with open(args.output.format(i // args.max_size), "w") as stream:
|
||||
yaml.dump_all(result[i : i + args.max_size], stream)
|
||||
@ -0,0 +1,84 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
desc = """Generate statistics about optimization records from the YAML files
|
||||
generated with -fsave-optimization-record and -fdiagnostics-show-hotness.
|
||||
|
||||
The tool requires the PyYAML and Pygments Python packages."""
|
||||
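# Example invocation (paths are hypothetical; this assumes the script is saved
# as opt-stats.py next to optrecord.py):
#   python3 opt-stats.py ./remarks_dir -j 4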
|
||||
import optrecord
|
||||
import argparse
|
||||
import operator
import sys
|
||||
from collections import defaultdict
|
||||
from multiprocessing import cpu_count, Pool
|
||||
|
||||
try:
|
||||
from guppy import hpy
|
||||
|
||||
hp = hpy()
|
||||
except ImportError:
|
||||
print("Memory consumption not shown because guppy is not installed")
|
||||
hp = None
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description=desc)
|
||||
parser.add_argument(
|
||||
"yaml_dirs_or_files",
|
||||
nargs="+",
|
||||
help="List of optimization record files or directories searched "
|
||||
"for optimization record files.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--jobs",
|
||||
"-j",
|
||||
default=None,
|
||||
type=int,
|
||||
help="Max job count (defaults to %(default)s, the current CPU count)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-progress-indicator",
|
||||
"-n",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Do not display any indicator of how many YAML files were read.",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
print_progress = not args.no_progress_indicator
|
||||
|
||||
files = optrecord.find_opt_files(*args.yaml_dirs_or_files)
|
||||
if not files:
|
||||
parser.error("No *.opt.yaml files found")
|
||||
sys.exit(1)
|
||||
|
||||
all_remarks, file_remarks, _ = optrecord.gather_results(
|
||||
files, args.jobs, print_progress
|
||||
)
|
||||
if print_progress:
|
||||
print("\n")
|
||||
|
||||
bypass = defaultdict(int)
|
||||
byname = defaultdict(int)
|
||||
for r in optrecord.itervalues(all_remarks):
|
||||
bypass[r.Pass] += 1
|
||||
byname[r.Pass + "/" + r.Name] += 1
|
||||
|
||||
total = len(all_remarks)
|
||||
print("{:24s} {:10d}".format("Total number of remarks", total))
|
||||
if hp:
|
||||
h = hp.heap()
|
||||
print("{:24s} {:10d}".format("Memory per remark", h.size / len(all_remarks)))
|
||||
print("\n")
|
||||
|
||||
print("Top 10 remarks by pass:")
|
||||
for (passname, count) in sorted(
|
||||
bypass.items(), key=operator.itemgetter(1), reverse=True
|
||||
)[:10]:
|
||||
print(" {:30s} {:2.0f}%".format(passname, count * 100.0 / total))
|
||||
|
||||
print("\nTop 10 remarks:")
|
||||
for (name, count) in sorted(
|
||||
byname.items(), key=operator.itemgetter(1), reverse=True
|
||||
)[:10]:
|
||||
print(" {:30s} {:2.0f}%".format(name, count * 100.0 / total))
|
||||
@ -0,0 +1,471 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import errno
|
||||
import functools
|
||||
import html
|
||||
import io
|
||||
from multiprocessing import cpu_count
|
||||
import os.path
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
from pygments import highlight
|
||||
from pygments.lexers.c_cpp import CppLexer
|
||||
from pygments.formatters import HtmlFormatter
|
||||
|
||||
import optpmap
|
||||
import optrecord
|
||||
|
||||
|
||||
desc = """Generate HTML output to visualize optimization records from the YAML files
|
||||
generated with -fsave-optimization-record and -fdiagnostics-show-hotness.
|
||||
|
||||
The tool requires the PyYAML and Pygments Python packages."""
|
||||
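# Example invocation (paths are hypothetical; this assumes the script is saved
# as opt-viewer.py next to optrecord.py and optpmap.py):
#   python3 opt-viewer.py ./remarks_dir --output-dir ./html --source-dir ./src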
|
||||
|
||||
# This allows passing the global context to the child processes.
|
||||
class Context:
|
||||
def __init__(self, caller_loc=dict()):
|
||||
# Map function names to their source location for function where inlining happened
|
||||
self.caller_loc = caller_loc
|
||||
|
||||
|
||||
context = Context()
|
||||
|
||||
|
||||
def suppress(remark):
|
||||
if remark.Name == "sil.Specialized":
|
||||
return remark.getArgDict()["Function"][0].startswith('"Swift.')
|
||||
elif remark.Name == "sil.Inlined":
|
||||
return remark.getArgDict()["Callee"][0].startswith(
|
||||
('"Swift.', '"specialized Swift.')
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
class SourceFileRenderer:
|
||||
def __init__(self, source_dir, output_dir, filename, no_highlight):
|
||||
self.filename = filename
|
||||
existing_filename = None
|
||||
if os.path.exists(filename):
|
||||
existing_filename = filename
|
||||
else:
|
||||
fn = os.path.join(source_dir, filename)
|
||||
if os.path.exists(fn):
|
||||
existing_filename = fn
|
||||
|
||||
self.no_highlight = no_highlight
|
||||
self.stream = io.open(
|
||||
os.path.join(output_dir, optrecord.html_file_name(filename)),
|
||||
"w",
|
||||
encoding="utf-8",
|
||||
)
|
||||
if existing_filename:
|
||||
self.source_stream = io.open(existing_filename, encoding="utf-8")
|
||||
else:
|
||||
self.source_stream = None
|
||||
print(
|
||||
"""
|
||||
<html>
|
||||
<h1>Unable to locate file {}</h1>
|
||||
</html>
|
||||
""".format(
|
||||
filename
|
||||
),
|
||||
file=self.stream,
|
||||
)
|
||||
|
||||
self.html_formatter = HtmlFormatter(encoding="utf-8")
|
||||
self.cpp_lexer = CppLexer(stripnl=False)
|
||||
|
||||
def render_source_lines(self, stream, line_remarks):
|
||||
file_text = stream.read()
|
||||
|
||||
if self.no_highlight:
|
||||
html_highlighted = file_text
|
||||
else:
|
||||
html_highlighted = highlight(file_text, self.cpp_lexer, self.html_formatter)
|
||||
|
||||
# Note that the API is different between Python 2 and 3. On
|
||||
# Python 3, pygments.highlight() returns a bytes object, so we
|
||||
# have to decode. On Python 2, the output is str but since we
|
||||
# support unicode characters and the output streams is unicode we
|
||||
# decode too.
|
||||
html_highlighted = html_highlighted.decode("utf-8")
|
||||
|
||||
# Take off the header and footer, these must be
|
||||
# reapplied line-wise, within the page structure
|
||||
html_highlighted = html_highlighted.replace(
|
||||
'<div class="highlight"><pre>', ""
|
||||
)
|
||||
html_highlighted = html_highlighted.replace("</pre></div>", "")
|
||||
|
||||
for (linenum, html_line) in enumerate(html_highlighted.split("\n"), start=1):
|
||||
print(
|
||||
"""
|
||||
<tr>
|
||||
<td><a name=\"L{linenum}\">{linenum}</a></td>
|
||||
<td></td>
|
||||
<td></td>
|
||||
<td><div class="highlight"><pre>{html_line}</pre></div></td>
|
||||
</tr>""".format(
|
||||
**locals()
|
||||
),
|
||||
file=self.stream,
|
||||
)
|
||||
|
||||
for remark in line_remarks.get(linenum, []):
|
||||
if not suppress(remark):
|
||||
self.render_inline_remarks(remark, html_line)
|
||||
|
||||
def render_inline_remarks(self, r, line):
|
||||
inlining_context = r.DemangledFunctionName
|
||||
dl = context.caller_loc.get(r.Function)
|
||||
if dl:
|
||||
dl_dict = dict(list(dl))
|
||||
link = optrecord.make_link(dl_dict["File"], dl_dict["Line"] - 2)
|
||||
inlining_context = "<a href={link}>{r.DemangledFunctionName}</a>".format(
|
||||
**locals()
|
||||
)
|
||||
|
||||
# Column is the number of characters *including* tabs, keep those and
|
||||
# replace everything else with spaces.
|
||||
indent = line[: max(r.Column, 1) - 1]
|
||||
indent = re.sub("\S", " ", indent)
|
||||
|
||||
# Create expanded message and link if we have a multiline message.
|
||||
lines = r.message.split("\n")
|
||||
if len(lines) > 1:
|
||||
expand_link = '<a style="text-decoration: none;" href="" onclick="toggleExpandedMessage(this); return false;">+</a>'
|
||||
message = lines[0]
|
||||
expand_message = """
|
||||
<div class="full-info" style="display:none;">
|
||||
<div class="col-left"><pre style="display:inline">{}</pre></div>
|
||||
<div class="expanded col-left"><pre>{}</pre></div>
|
||||
</div>""".format(
|
||||
indent, "\n".join(lines[1:])
|
||||
)
|
||||
else:
|
||||
expand_link = ""
|
||||
expand_message = ""
|
||||
message = r.message
|
||||
print(
|
||||
"""
|
||||
<tr>
|
||||
<td></td>
|
||||
<td>{r.RelativeHotness}</td>
|
||||
<td class=\"column-entry-{r.color}\">{r.PassWithDiffPrefix}</td>
|
||||
<td><pre style="display:inline">{indent}</pre><span class=\"column-entry-yellow\">{expand_link} {message} </span>{expand_message}</td>
|
||||
<td class=\"column-entry-yellow\">{inlining_context}</td>
|
||||
</tr>""".format(
|
||||
**locals()
|
||||
),
|
||||
file=self.stream,
|
||||
)
|
||||
|
||||
def render(self, line_remarks):
|
||||
if not self.source_stream:
|
||||
return
|
||||
|
||||
print(
|
||||
"""
|
||||
<html>
|
||||
<title>{}</title>
|
||||
<meta charset="utf-8" />
|
||||
<head>
|
||||
<link rel='stylesheet' type='text/css' href='style.css'>
|
||||
<script type="text/javascript">
|
||||
/* Simple helper to show/hide the expanded message of a remark. */
|
||||
function toggleExpandedMessage(e) {{
|
||||
var FullTextElems = e.parentElement.parentElement.getElementsByClassName("full-info");
|
||||
if (!FullTextElems || FullTextElems.length < 1) {{
|
||||
return false;
|
||||
}}
|
||||
var FullText = FullTextElems[0];
|
||||
if (FullText.style.display == 'none') {{
|
||||
e.innerHTML = '-';
|
||||
FullText.style.display = 'block';
|
||||
}} else {{
|
||||
e.innerHTML = '+';
|
||||
FullText.style.display = 'none';
|
||||
}}
|
||||
}}
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="centered">
|
||||
<table class="source">
|
||||
<thead>
|
||||
<tr>
|
||||
<th style="width: 2%">Line</td>
|
||||
<th style="width: 3%">Hotness</td>
|
||||
<th style="width: 10%">Optimization</td>
|
||||
<th style="width: 70%">Source</td>
|
||||
<th style="width: 15%">Inline Context</td>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>""".format(
|
||||
os.path.basename(self.filename)
|
||||
),
|
||||
file=self.stream,
|
||||
)
|
||||
self.render_source_lines(self.source_stream, line_remarks)
|
||||
|
||||
print(
|
||||
"""
|
||||
</tbody>
|
||||
</table>
|
||||
</body>
|
||||
</html>""",
|
||||
file=self.stream,
|
||||
)
|
||||
|
||||
|
||||
class IndexRenderer:
|
||||
def __init__(
|
||||
self, output_dir, should_display_hotness, max_hottest_remarks_on_index
|
||||
):
|
||||
self.stream = io.open(
|
||||
os.path.join(output_dir, "index.html"), "w", encoding="utf-8"
|
||||
)
|
||||
self.should_display_hotness = should_display_hotness
|
||||
self.max_hottest_remarks_on_index = max_hottest_remarks_on_index
|
||||
|
||||
def render_entry(self, r, odd):
|
||||
escaped_name = html.escape(r.DemangledFunctionName)
|
||||
print(
|
||||
"""
|
||||
<tr>
|
||||
<td class=\"column-entry-{odd}\"><a href={r.Link}>{r.DebugLocString}</a></td>
|
||||
<td class=\"column-entry-{odd}\">{r.RelativeHotness}</td>
|
||||
<td class=\"column-entry-{odd}\">{escaped_name}</td>
|
||||
<td class=\"column-entry-{r.color}\">{r.PassWithDiffPrefix}</td>
|
||||
</tr>""".format(
|
||||
**locals()
|
||||
),
|
||||
file=self.stream,
|
||||
)
|
||||
|
||||
def render(self, all_remarks):
|
||||
print(
|
||||
"""
|
||||
<html>
|
||||
<meta charset="utf-8" />
|
||||
<head>
|
||||
<link rel='stylesheet' type='text/css' href='style.css'>
|
||||
</head>
|
||||
<body>
|
||||
<div class="centered">
|
||||
<table>
|
||||
<tr>
|
||||
<td>Source Location</td>
|
||||
<td>Hotness</td>
|
||||
<td>Function</td>
|
||||
<td>Pass</td>
|
||||
</tr>""",
|
||||
file=self.stream,
|
||||
)
|
||||
|
||||
max_entries = None
|
||||
if self.should_display_hotness:
|
||||
max_entries = self.max_hottest_remarks_on_index
|
||||
|
||||
for i, remark in enumerate(all_remarks[:max_entries]):
|
||||
if not suppress(remark):
|
||||
self.render_entry(remark, i % 2)
|
||||
print(
|
||||
"""
|
||||
</table>
|
||||
</body>
|
||||
</html>""",
|
||||
file=self.stream,
|
||||
)
|
||||
|
||||
|
||||
def _render_file(source_dir, output_dir, ctx, no_highlight, entry, filter_):
|
||||
global context
|
||||
context = ctx
|
||||
filename, remarks = entry
|
||||
SourceFileRenderer(source_dir, output_dir, filename, no_highlight).render(remarks)
|
||||
|
||||
|
||||
def map_remarks(all_remarks):
|
||||
# Set up a map between function names and their source location for
|
||||
# function where inlining happened
|
||||
for remark in optrecord.itervalues(all_remarks):
|
||||
if (
|
||||
isinstance(remark, optrecord.Passed)
|
||||
and remark.Pass == "inline"
|
||||
and remark.Name == "Inlined"
|
||||
):
|
||||
for arg in remark.Args:
|
||||
arg_dict = dict(list(arg))
|
||||
caller = arg_dict.get("Caller")
|
||||
if caller:
|
||||
try:
|
||||
context.caller_loc[caller] = arg_dict["DebugLoc"]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
|
||||
def generate_report(
|
||||
all_remarks,
|
||||
file_remarks,
|
||||
source_dir,
|
||||
output_dir,
|
||||
no_highlight,
|
||||
should_display_hotness,
|
||||
max_hottest_remarks_on_index,
|
||||
num_jobs,
|
||||
should_print_progress,
|
||||
):
|
||||
try:
|
||||
os.makedirs(output_dir)
|
||||
except OSError as e:
|
||||
if e.errno == errno.EEXIST and os.path.isdir(output_dir):
|
||||
pass
|
||||
else:
|
||||
raise
|
||||
|
||||
if should_print_progress:
|
||||
print("Rendering index page...")
|
||||
if should_display_hotness:
|
||||
sorted_remarks = sorted(
|
||||
optrecord.itervalues(all_remarks),
|
||||
key=lambda r: (
|
||||
r.Hotness,
|
||||
r.File,
|
||||
r.Line,
|
||||
r.Column,
|
||||
r.PassWithDiffPrefix,
|
||||
r.yaml_tag,
|
||||
r.Function,
|
||||
),
|
||||
reverse=True,
|
||||
)
|
||||
else:
|
||||
sorted_remarks = sorted(
|
||||
optrecord.itervalues(all_remarks),
|
||||
key=lambda r: (
|
||||
r.File,
|
||||
r.Line,
|
||||
r.Column,
|
||||
r.PassWithDiffPrefix,
|
||||
r.yaml_tag,
|
||||
r.Function,
|
||||
),
|
||||
)
|
||||
IndexRenderer(
|
||||
output_dir, should_display_hotness, max_hottest_remarks_on_index
|
||||
).render(sorted_remarks)
|
||||
|
||||
shutil.copy(
|
||||
os.path.join(os.path.dirname(os.path.realpath(__file__)), "style.css"),
|
||||
output_dir,
|
||||
)
|
||||
|
||||
_render_file_bound = functools.partial(
|
||||
_render_file, source_dir, output_dir, context, no_highlight
|
||||
)
|
||||
if should_print_progress:
|
||||
print("Rendering HTML files...")
|
||||
optpmap.pmap(
|
||||
_render_file_bound, file_remarks.items(), num_jobs, should_print_progress
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description=desc)
|
||||
parser.add_argument(
|
||||
"yaml_dirs_or_files",
|
||||
nargs="+",
|
||||
help="List of optimization record files or directories searched "
|
||||
"for optimization record files.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--output-dir",
|
||||
"-o",
|
||||
default="html",
|
||||
help="Path to a directory where generated HTML files will be output. "
|
||||
"If the directory does not already exist, it will be created. "
|
||||
'"%(default)s" by default.',
|
||||
)
|
||||
parser.add_argument(
|
||||
"--jobs",
|
||||
"-j",
|
||||
default=None,
|
||||
type=int,
|
||||
help="Max job count (defaults to %(default)s, the current CPU count)",
|
||||
)
|
||||
parser.add_argument("--source-dir", "-s", default="", help="set source directory")
|
||||
parser.add_argument(
|
||||
"--no-progress-indicator",
|
||||
"-n",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Do not display any indicator of how many YAML files were read "
|
||||
"or rendered into HTML.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--max-hottest-remarks-on-index",
|
||||
default=1000,
|
||||
type=int,
|
||||
help="Maximum number of the hottest remarks to appear on the index page",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-highlight",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Do not use a syntax highlighter when rendering the source code",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--demangler",
|
||||
help="Set the demangler to be used (defaults to %s)"
|
||||
% optrecord.Remark.default_demangler,
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--filter",
|
||||
default="",
|
||||
help="Only display remarks from passes matching filter expression",
|
||||
)
|
||||
|
||||
# Do not make this a global variable. Values needed to be propagated through
|
||||
# to individual classes and functions to be portable with multiprocessing across
|
||||
# Windows and non-Windows.
|
||||
args = parser.parse_args()
|
||||
|
||||
print_progress = not args.no_progress_indicator
|
||||
if args.demangler:
|
||||
optrecord.Remark.set_demangler(args.demangler)
|
||||
|
||||
files = optrecord.find_opt_files(*args.yaml_dirs_or_files)
|
||||
if not files:
|
||||
parser.error("No *.opt.yaml files found")
|
||||
sys.exit(1)
|
||||
|
||||
all_remarks, file_remarks, should_display_hotness = optrecord.gather_results(
|
||||
files, args.jobs, print_progress, args.filter
|
||||
)
|
||||
|
||||
map_remarks(all_remarks)
|
||||
|
||||
generate_report(
|
||||
all_remarks,
|
||||
file_remarks,
|
||||
args.source_dir,
|
||||
args.output_dir,
|
||||
args.no_highlight,
|
||||
should_display_hotness,
|
||||
args.max_hottest_remarks_on_index,
|
||||
args.jobs,
|
||||
print_progress,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@ -0,0 +1,63 @@
|
||||
import sys
|
||||
import multiprocessing
|
||||
|
||||
|
||||
_current = None
|
||||
_total = None
|
||||
|
||||
|
||||
def _init(current, total):
|
||||
global _current
|
||||
global _total
|
||||
_current = current
|
||||
_total = total
|
||||
|
||||
|
||||
def _wrapped_func(func_and_args):
|
||||
func, argument, should_print_progress, filter_ = func_and_args
|
||||
|
||||
if should_print_progress:
|
||||
with _current.get_lock():
|
||||
_current.value += 1
|
||||
sys.stdout.write("\r\t{} of {}".format(_current.value, _total.value))
|
||||
sys.stdout.flush()
|
||||
|
||||
return func(argument, filter_)
|
||||
|
||||
|
||||
def pmap(
|
||||
func, iterable, processes, should_print_progress, filter_=None, *args, **kwargs
|
||||
):
|
||||
"""
|
||||
A parallel map function that reports on its progress.
|
||||
|
||||
Applies `func` to every item of `iterable` and returns a list of the
|
||||
results. If `processes` is greater than one, a process pool is used to run
|
||||
the functions in parallel. `should_print_progress` is a boolean value that
|
||||
indicates whether a string 'N of M' should be printed to indicate how many
|
||||
of the functions have finished being run.
|
||||
"""
|
||||
global _current
|
||||
global _total
|
||||
_current = multiprocessing.Value("i", 0)
|
||||
_total = multiprocessing.Value("i", len(iterable))
|
||||
|
||||
func_and_args = [(func, arg, should_print_progress, filter_) for arg in iterable]
|
||||
if processes == 1:
|
||||
result = list(map(_wrapped_func, func_and_args, *args, **kwargs))
|
||||
else:
|
||||
pool = multiprocessing.Pool(
|
||||
initializer=_init,
|
||||
initargs=(
|
||||
_current,
|
||||
_total,
|
||||
),
|
||||
processes=processes,
|
||||
)
|
||||
result = pool.map(_wrapped_func, func_and_args, *args, **kwargs)
|
||||
pool.close()
|
||||
pool.join()
|
||||
|
||||
if should_print_progress:
|
||||
sys.stdout.write("\r")
|
||||
return result
|
||||
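# Example usage (the callable and file names are hypothetical); each item of the
# iterable is passed to the callable together with the optional filter argument:
#   results = pmap(parse_one_file, ["a.opt.yaml", "b.opt.yaml"],
#                  processes=2, should_print_progress=True)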
@ -0,0 +1,218 @@
|
||||
.source {
|
||||
table-layout: fixed;
|
||||
width: 100%;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.source td {
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
.red {
|
||||
background-color: #ffd0d0;
|
||||
}
|
||||
.cyan {
|
||||
background-color: cyan;
|
||||
}
|
||||
body {
|
||||
font-family: -apple-system, sans-serif;
|
||||
}
|
||||
pre {
|
||||
margin-top: 0px !important;
|
||||
margin-bottom: 0px !important;
|
||||
}
|
||||
.source-name-title {
|
||||
padding: 5px 10px;
|
||||
border-bottom: 1px solid #dbdbdb;
|
||||
background-color: #eee;
|
||||
line-height: 35px;
|
||||
}
|
||||
.centered {
|
||||
display: table;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
border: 1px solid #dbdbdb;
|
||||
border-radius: 3px;
|
||||
}
|
||||
.expansion-view {
|
||||
background-color: rgba(0, 0, 0, 0);
|
||||
margin-left: 0px;
|
||||
margin-top: 5px;
|
||||
margin-right: 5px;
|
||||
margin-bottom: 5px;
|
||||
border: 1px solid #dbdbdb;
|
||||
border-radius: 3px;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
.light-row {
|
||||
background: #ffffff;
|
||||
border: 1px solid #dbdbdb;
|
||||
}
|
||||
.column-entry {
|
||||
text-align: right;
|
||||
}
|
||||
.column-entry-left {
|
||||
text-align: left;
|
||||
}
|
||||
.column-entry-white {
|
||||
text-align: right;
|
||||
background-color: #ffffff;
|
||||
}
|
||||
.column-entry-red {
|
||||
text-align: right;
|
||||
background-color: #ffd0d0;
|
||||
}
|
||||
.column-entry-green {
|
||||
text-align: right;
|
||||
background-color: #d0ffd0;
|
||||
}
|
||||
.column-entry-yellow {
|
||||
text-align: left;
|
||||
background-color: #ffe1a6;
|
||||
}
|
||||
.column-entry-0 {
|
||||
background-color: #ffffff;
|
||||
}
|
||||
.column-entry-1 {
|
||||
background-color: #eeeeee;
|
||||
}
|
||||
.line-number {
|
||||
text-align: right;
|
||||
color: #aaa;
|
||||
}
|
||||
.covered-line {
|
||||
text-align: right;
|
||||
color: #0080ff;
|
||||
}
|
||||
.uncovered-line {
|
||||
text-align: right;
|
||||
color: #ff3300;
|
||||
}
|
||||
.tooltip {
|
||||
position: relative;
|
||||
display: inline;
|
||||
background-color: #b3e6ff;
|
||||
text-decoration: none;
|
||||
}
|
||||
.tooltip span.tooltip-content {
|
||||
position: absolute;
|
||||
width: 100px;
|
||||
margin-left: -50px;
|
||||
color: #FFFFFF;
|
||||
background: #000000;
|
||||
height: 30px;
|
||||
line-height: 30px;
|
||||
text-align: center;
|
||||
visibility: hidden;
|
||||
border-radius: 6px;
|
||||
}
|
||||
.tooltip span.tooltip-content:after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: 100%;
|
||||
left: 50%;
|
||||
margin-left: -8px;
|
||||
width: 0; height: 0;
|
||||
border-top: 8px solid #000000;
|
||||
border-right: 8px solid transparent;
|
||||
border-left: 8px solid transparent;
|
||||
}
|
||||
:hover.tooltip span.tooltip-content {
|
||||
visibility: visible;
|
||||
opacity: 0.8;
|
||||
bottom: 30px;
|
||||
left: 50%;
|
||||
z-index: 999;
|
||||
}
|
||||
th, td {
|
||||
vertical-align: top;
|
||||
padding: 2px 5px;
|
||||
border-collapse: collapse;
|
||||
border-right: solid 1px #eee;
|
||||
border-left: solid 1px #eee;
|
||||
}
|
||||
td:first-child {
|
||||
border-left: none;
|
||||
}
|
||||
td:last-child {
|
||||
border-right: none;
|
||||
}
|
||||
.expanded {
|
||||
background-color: #f2f2f2;
|
||||
padding-top: 5px;
|
||||
padding-left: 5px;
|
||||
}
|
||||
.col-left {
|
||||
float: left;
|
||||
margin-bottom: -99999px;
|
||||
padding-bottom: 99999px;
|
||||
}
|
||||
|
||||
/* Generated with pygmentize -S colorful -f html >> style.css */
|
||||
|
||||
.hll { background-color: #ffffcc }
|
||||
.c { color: #888888 } /* Comment */
|
||||
.err { color: #FF0000; background-color: #FFAAAA } /* Error */
|
||||
.k { color: #008800; font-weight: bold } /* Keyword */
|
||||
.o { color: #333333 } /* Operator */
|
||||
.ch { color: #888888 } /* Comment.Hashbang */
|
||||
.cm { color: #888888 } /* Comment.Multiline */
|
||||
.cp { color: #557799 } /* Comment.Preproc */
|
||||
.cpf { color: #888888 } /* Comment.PreprocFile */
|
||||
.c1 { color: #888888 } /* Comment.Single */
|
||||
.cs { color: #cc0000; font-weight: bold } /* Comment.Special */
|
||||
.gd { color: #A00000 } /* Generic.Deleted */
|
||||
.ge { font-style: italic } /* Generic.Emph */
|
||||
.gr { color: #FF0000 } /* Generic.Error */
|
||||
.gh { color: #000080; font-weight: bold } /* Generic.Heading */
|
||||
.gi { color: #00A000 } /* Generic.Inserted */
|
||||
.go { color: #888888 } /* Generic.Output */
|
||||
.gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
|
||||
.gs { font-weight: bold } /* Generic.Strong */
|
||||
.gu { color: #800080; font-weight: bold } /* Generic.Subheading */
|
||||
.gt { color: #0044DD } /* Generic.Traceback */
|
||||
.kc { color: #008800; font-weight: bold } /* Keyword.Constant */
|
||||
.kd { color: #008800; font-weight: bold } /* Keyword.Declaration */
|
||||
.kn { color: #008800; font-weight: bold } /* Keyword.Namespace */
|
||||
.kp { color: #003388; font-weight: bold } /* Keyword.Pseudo */
|
||||
.kr { color: #008800; font-weight: bold } /* Keyword.Reserved */
|
||||
.kt { color: #333399; font-weight: bold } /* Keyword.Type */
|
||||
.m { color: #6600EE; font-weight: bold } /* Literal.Number */
|
||||
.s { background-color: #fff0f0 } /* Literal.String */
|
||||
.na { color: #0000CC } /* Name.Attribute */
|
||||
.nb { color: #007020 } /* Name.Builtin */
|
||||
.nc { color: #BB0066; font-weight: bold } /* Name.Class */
|
||||
.no { color: #003366; font-weight: bold } /* Name.Constant */
|
||||
.nd { color: #555555; font-weight: bold } /* Name.Decorator */
|
||||
.ni { color: #880000; font-weight: bold } /* Name.Entity */
|
||||
.ne { color: #FF0000; font-weight: bold } /* Name.Exception */
|
||||
.nf { color: #0066BB; font-weight: bold } /* Name.Function */
|
||||
.nl { color: #997700; font-weight: bold } /* Name.Label */
|
||||
.nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
|
||||
.nt { color: #007700 } /* Name.Tag */
|
||||
.nv { color: #996633 } /* Name.Variable */
|
||||
.ow { color: #000000; font-weight: bold } /* Operator.Word */
|
||||
.w { color: #bbbbbb } /* Text.Whitespace */
|
||||
.mb { color: #6600EE; font-weight: bold } /* Literal.Number.Bin */
|
||||
.mf { color: #6600EE; font-weight: bold } /* Literal.Number.Float */
|
||||
.mh { color: #005588; font-weight: bold } /* Literal.Number.Hex */
|
||||
.mi { color: #0000DD; font-weight: bold } /* Literal.Number.Integer */
|
||||
.mo { color: #4400EE; font-weight: bold } /* Literal.Number.Oct */
|
||||
.sb { background-color: #fff0f0 } /* Literal.String.Backtick */
|
||||
.sc { color: #0044DD } /* Literal.String.Char */
|
||||
.sd { color: #DD4422 } /* Literal.String.Doc */
|
||||
.s2 { background-color: #fff0f0 } /* Literal.String.Double */
|
||||
.se { color: #666666; font-weight: bold; background-color: #fff0f0 } /* Literal.String.Escape */
|
||||
.sh { background-color: #fff0f0 } /* Literal.String.Heredoc */
|
||||
.si { background-color: #eeeeee } /* Literal.String.Interpol */
|
||||
.sx { color: #DD2200; background-color: #fff0f0 } /* Literal.String.Other */
|
||||
.sr { color: #000000; background-color: #fff0ff } /* Literal.String.Regex */
|
||||
.s1 { background-color: #fff0f0 } /* Literal.String.Single */
|
||||
.ss { color: #AA6600 } /* Literal.String.Symbol */
|
||||
.bp { color: #007020 } /* Name.Builtin.Pseudo */
|
||||
.vc { color: #336699 } /* Name.Variable.Class */
|
||||
.vg { color: #dd7700; font-weight: bold } /* Name.Variable.Global */
|
||||
.vi { color: #3333BB } /* Name.Variable.Instance */
|
||||
.il { color: #0000DD; font-weight: bold } /* Literal.Number.Integer.Long */
|
||||
@ -0,0 +1,127 @@
|
||||
Android Order Files Scripts
|
||||
============================
|
||||
|
||||
For the latest version of this doc, please make sure to visit:
|
||||
[Android Order Files Scripts](https://android.googlesource.com/toolchain/llvm_android/+/refs/heads/main/orderfiles/scripts/README.md)
|
||||
|
||||
Getting started with Order files
|
||||
----------------------------------
|
||||
Order files are text files containing symbols that represent function names.
|
||||
The linker (lld) uses order files to lay out functions in a specific order.
|
||||
Ordering binaries this way reduces page faults and improves a program's launch time in Android, because symbols are loaded more efficiently during the program's cold start.
|
||||
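For example, a minimal (hypothetical) order file is just a list of symbol names, one per line:

```
main
_Z10start_initv
_Z11parse_filesv
```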
|
||||
The scripts described here are used to create and validate order files. You can learn how and when they are used by looking at [Android Order Files](https://android.googlesource.com/toolchain/llvm_android/+/refs/heads/main/orderfiles/README.md).
|
||||
|
||||
File/CSV Format
|
||||
----------------------------------
|
||||
Some script arguments accept three formats (File, CSV, or Folder), distinguished by their first character.
|
||||
All formats represent a list of values, in our case symbols or files; a short example follows the list below.
|
||||
- File format: The file will have one value per line.
|
||||
Add @ before the filename to show it is a file.
|
||||
If the values are files, each line has the format (file, weight).
|
||||
Example: @example.txt
|
||||
- CSV format: Use quotation marks around the comma-separated values.
|
||||
Example: "main,foo,bar"
|
||||
- Folder format: Add ^ before the path to the folder.
|
||||
We assume every file in the folder ends with ".orderfile".
|
||||
Example: ^path/to/folder
|
||||
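For instance, a hypothetical deny list (used by the create and validate scripts below) and a folder of order files (used by the merge script) could be passed like this:

```
# File format: one value per line, passed as @denylist.txt
--denylist @denylist.txt

# CSV format: quoted comma-separated values
--denylist "main,foo,bar"

# Folder format: every *.orderfile inside the folder is used
--order-files ^path/to/orderfiles
```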
|
||||
Orderfile scripts
|
||||
----------------------------------
|
||||
The following scripts are provided:
|
||||
- [create_orderfile](create_orderfile.py)
|
||||
- [validate_orderfile](validate_orderfile.py)
|
||||
- [merge_orderfile](merge_orderfile.py)
|
||||
|
||||
To run the scripts, you may need to install the following Python 3 dependencies:
|
||||
- bitarray
|
||||
- graphviz
|
||||
|
||||
Create Order file
|
||||
----------------------------------
|
||||
You can create an orderfile from a mapping file and profile file.
|
||||
|
||||
```
|
||||
python3 create_orderfile.py [-h] --profile-file PROFILE_FILE --mapping-file MAPPING_FILE [--output OUTPUT] [--denylist DENYLIST] [--last-symbol LAST_SYMBOL] [--leftover]
|
||||
```
|
||||
|
||||
Flags:
|
||||
- Profile file (--profile-file):
|
||||
- Description: The profile file generated by running a binary compiled with -forder-file-instrumentation
|
||||
- Type: String
|
||||
- Required
|
||||
- Mapping file (--mapping-file):
|
||||
- Description: The mapping file generated during compilation that maps MD5 hashes to symbol names
|
||||
- Type: String
|
||||
- Required
|
||||
- Output file (--output):
|
||||
- Description: The output file name for the order file. Default Name: default.orderfile
|
||||
- Type: String
|
||||
- Deny List (--denylist):
|
||||
- Description: Symbols that you want to exclude from the order file
|
||||
- Type: String (File/CSV)
|
||||
- Last symbol (--last-symbol):
|
||||
- Description: The order file will end at the passed last symbol and ignore the symbols after it.
|
||||
If you want an order file only for startup, you should pass the last startup symbol.
|
||||
Last-symbol has priority over leftover, so symbols are written only up to the last symbol and the leftover flag is ignored.
|
||||
- Type: String
|
||||
- Leftover symbols (--leftover):
|
||||
- Description: Some symbols (functions) might not have been executed, so they will not appear in the profile file.
|
||||
If you want these symbols in your order file, this flag adds them at the end.
|
||||
- Type: Bool
|
||||
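For example, using the sample inputs under orderfiles/test (the output name here is made up):

```
python3 create_orderfile.py --profile-file example.prof --mapping-file example-mapping.txt --output example.orderfile --leftover
```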
|
||||
Validate Order file
|
||||
----------------------------------
|
||||
Once we get an order file for a library or binary, we need to check if it is valid based on each team’s criteria.
|
||||
To automate this process, we wrote a Python script to check the criteria.
|
||||
The criteria currently supported are:
|
||||
- A partial order of symbols that must be respected in the order file
|
||||
- Symbols that have to be present in the order file
|
||||
- Symbols that should not be present in the order file
|
||||
- A minimum number of symbols needed to make an order file useful for page layout purposes
|
||||
|
||||
```
|
||||
python3 validate_orderfile.py [-h] --order-file ORDER_FILE [--partial PARTIAL] [--allowlist ALLOWLIST] [--denylist DENYLIST] [--min MIN]
|
||||
```
|
||||
|
||||
Flags:
|
||||
- Order file (--order-file):
|
||||
- Description: The order file that is validated against the criteria below
|
||||
- Type: String
|
||||
- Required
|
||||
- Partial Order (--partial):
|
||||
- Description: A partial order of symbols that must be correct in the order file.
|
||||
- Type: String (File/CSV)
|
||||
- Allow List (--allowlist):
|
||||
- Description: Symbols that have to be present in orderfile
|
||||
- Type: String (File/CSV)
|
||||
- Deny List (--denylist):
|
||||
- Description: Symbols that should not be present in the order file. The denylist flag has priority over the allowlist.
|
||||
- Type: String (File/CSV)
|
||||
- Minimum Number of Entries (--min):
|
||||
- Description: Minimum number of symbols to make an orderfile good for page layout purposes
|
||||
- Type: Int
|
||||
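For example, to require at least 100 symbols, a partial order, and the absence of one symbol (all names here are made up):

```
python3 validate_orderfile.py --order-file example.orderfile --partial "main,foo" --denylist "bar" --min 100
```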
|
||||
Merge Order File
|
||||
----------------------------------
|
||||
Running the same executable on different devices might not produce the same order file, due to threading, the OS, side effects, etc.
|
||||
As a result, our script will take all the order files and merge them into one order file while trying to maintain locality.
|
||||
Because lower-end devices require a better layout for a performance boost, you can assign weights to order files and give
|
||||
lower-end device order files a higher weight. You can only assign weights if you use the File format; an example can be found
|
||||
in test/merge-test/merge.txt.
|
||||
|
||||
```
|
||||
python3 merge_orderfile.py [-h] --order-files ORDER_FILES [--output OUTPUT] [--graph-image GRAPH_IMAGE]
|
||||
```
|
||||
|
||||
Flags:
|
||||
- Files (--order-files):
|
||||
- Description: A collection of order files that need to be merged together
|
||||
- Type: String (File/CSV/Folder)
|
||||
- Required
|
||||
- Output (--output):
|
||||
- Description: Provide the output file name for the order file. Default Name: default.orderfile
|
||||
- Type: String
|
||||
- Graph Image (--graph-image):
|
||||
- Description: Provide the output image name for the graph representation of the order files
|
||||
- Type: String
|
||||
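For example, merging every order file in a folder while also emitting a graph image (the paths are made up):

```
python3 merge_orderfile.py --order-files ^path/to/orderfiles --output merged.orderfile --graph-image merged_graph
```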
@ -0,0 +1,128 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright (C) 2023 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Sample Usage:
|
||||
# $ python3 create_orderfile.py --profile-file ../orderfiles/test/example.prof --mapping-file ../orderfiles/test/example-mapping.txt
|
||||
#
|
||||
# Try '-h' for a full list of command line arguments.
|
||||
|
||||
import argparse
|
||||
import orderfile_utils
|
||||
|
||||
def parse_args():
|
||||
"""Parses and returns command line arguments."""
|
||||
parser = argparse.ArgumentParser(prog="create_orderfile",
|
||||
description="Create orderfile from profile file and mapping file")
|
||||
|
||||
parser.add_argument(
|
||||
"--profile-file",
|
||||
required=True,
|
||||
help="Parsed profile file that represents the order of the symbol execution")
|
||||
|
||||
parser.add_argument(
|
||||
"--mapping-file",
|
||||
required=True,
|
||||
help="Mapped file that provides the mapping between MD5 hash and symbol name")
|
||||
|
||||
parser.add_argument(
|
||||
"--output",
|
||||
default="default.orderfile",
|
||||
help="Provide the output file name for the order file. Default Name: default.orderfile")
|
||||
|
||||
parser.add_argument(
|
||||
"--denylist",
|
||||
default="",
|
||||
help=f"Exclude symbols based on a symbol-per-line file with @ or comma separarted values within a quotation."
|
||||
f"For example, you can say @file.txt or 'main,bar,foo'")
|
||||
|
||||
parser.add_argument(
|
||||
"--last-symbol",
|
||||
help=f"Create an order file until the passed last symbol and ignore the symbols after it."
|
||||
f"Useful if you want an order file only for startup so you should pass the last startup symbol."
|
||||
f"Last-symbol has priority over leftover so we will output until the last symbol and ignore the leftover flag.")
|
||||
|
||||
parser.add_argument(
|
||||
"--leftover",
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="Add the symbols seen in mapping file but not in profile file at the end")
|
||||
|
||||
return parser.parse_args()
|
||||
|
||||
def main():
|
||||
args = parse_args()
|
||||
|
||||
symbols = []
|
||||
mapping = {}
|
||||
seen = set()
|
||||
denylist = orderfile_utils.parse_set(args.denylist)
|
||||
|
||||
# Load the MD5 hash mappings of the symbols.
|
||||
with open(args.mapping_file, "r") as f:
|
||||
for line in f:
|
||||
line = line.strip().split()
|
||||
mapping[line[1]] = line[2]
|
||||
|
||||
# Parse the profile file
|
||||
with open(args.profile_file, "r") as f:
|
||||
for line in f:
|
||||
line = line.strip().split()
|
||||
|
||||
# Every line should have 2 MD5 hashes in reverse order (little Endian)
|
||||
# so we need to reverse them to get the actual md5 hashes
|
||||
if len(line) >= 8:
|
||||
md5_1_b_list = line[1:9]
|
||||
md5_2_b_list = line[9:17]
|
||||
|
||||
md5_1_b_list.reverse()
|
||||
md5_2_b_list.reverse()
|
||||
|
||||
md5_1 = "".join(md5_1_b_list)
|
||||
md5_2 = "".join(md5_2_b_list)
|
||||
|
||||
if md5_1 in mapping:
|
||||
symbol_1 = mapping[md5_1]
|
||||
seen.add(symbol_1)
|
||||
|
||||
if symbol_1 not in denylist:
|
||||
symbols.append(symbol_1)
|
||||
|
||||
if md5_2 in mapping:
|
||||
symbol_2 = mapping[md5_2]
|
||||
seen.add(symbol_2)
|
||||
|
||||
if symbol_2 not in denylist:
|
||||
symbols.append(symbol_2)
|
||||
|
||||
# Functions in the mapping but not seen in the partial order.
|
||||
# If you want to add them, you can use the leftover flag.
|
||||
# Note: You can only use the leftover flag if the last-symbol flag was not passed
|
||||
if args.leftover and args.last_symbol is None:
|
||||
for md5 in mapping:
|
||||
if mapping[md5] not in seen:
|
||||
symbols.append(mapping[md5])
|
||||
|
||||
# Write it to output file
|
||||
with open(args.output, "w") as f:
|
||||
for symbol in symbols:
|
||||
f.write(symbol+"\n")
|
||||
|
||||
# If we are at the last-symbol, we do not write the rest of the symbols
|
||||
if symbol == args.last_symbol:
|
||||
break
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@ -0,0 +1,408 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright (C) 2023 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Sample Usage:
|
||||
# $ python3 merge_orderfile.py --order-files ^../orderfiles/test
|
||||
#
|
||||
# Try '-h' for a full list of command line arguments.
|
||||
#
|
||||
# Note: We allow three formats: Folder, File, and CSV
|
||||
# As lower end devices require the most help, you can give
|
||||
# their order files a higher weight.
|
||||
# You can only provide weights if you choose File format.
|
||||
# For example, an order file weight of 4 means the edges
|
||||
# in the graph will be multiplied by 4.
|
||||
# CSV and Folder assume all files have a weight of 1.
|
||||
# An example file can be found at ../test/merge-test/merge.txt
|
||||
|
||||
from bitarray import bitarray
|
||||
import argparse
|
||||
import graphviz
|
||||
|
||||
import orderfile_utils
|
||||
|
||||
class Vertex(object):
|
||||
"""Vertex (symbol) in the graph."""
|
||||
def __init__(self, name: str) -> None:
|
||||
self.name = name
|
||||
self.count = 0
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if isinstance(other, Vertex):
|
||||
return self.name == other.name
|
||||
return False
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(self.name)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f'{self.name}({self.count})'
|
||||
|
||||
def appears(self) -> None:
|
||||
self.count += 1
|
||||
|
||||
class Graph(object):
|
||||
"""Graph representation of the order files."""
|
||||
def __init__(self) -> None:
|
||||
self.graph = {}
|
||||
self.reverse = {}
|
||||
self.vertices = {}
|
||||
|
||||
def __str__(self) -> str:
|
||||
string = ""
|
||||
for (f_symbol, value) in self.graph.items():
|
||||
for (t_symbol, weight) in self.graph[f_symbol].items():
|
||||
string += f'{f_symbol} --{weight}--> {t_symbol}\n'
|
||||
return string
|
||||
|
||||
def addVertex(self, symbol: str) -> None:
|
||||
if symbol not in self.vertices:
|
||||
v = Vertex(symbol)
|
||||
self.vertices[symbol] = v
|
||||
self.graph[v] = {}
|
||||
self.reverse[v] = {}
|
||||
|
||||
self.vertices[symbol].appears()
|
||||
|
||||
def addEdge(self, from_symbol: str, to_symbol: str) -> None:
|
||||
"""Add an edge (it represents two symbols are consecutive)."""
|
||||
from_vertex = self.vertices.get(from_symbol)
|
||||
to_vertex = self.vertices.get(to_symbol)
|
||||
|
||||
if from_vertex is None:
|
||||
raise RuntimeError(f"Symbol {from_symbol} is not in graph")
|
||||
|
||||
if to_vertex is None:
|
||||
raise RuntimeError(f"Symbol {to_symbol} is not in graph")
|
||||
|
||||
if to_vertex not in self.graph[from_vertex]:
|
||||
self.graph[from_vertex][to_vertex] = 0
|
||||
self.reverse[to_vertex][from_vertex] = 0
|
||||
|
||||
self.graph[from_vertex][to_vertex] += 1
|
||||
self.reverse[to_vertex][from_vertex] += 1
|
||||
|
||||
def removeEdgeCompletely(self, from_symbol: str, to_symbol: str) -> None:
|
||||
"""Remove the edge from the graph"""
|
||||
from_vertex = self.vertices.get(from_symbol)
|
||||
to_vertex = self.vertices.get(to_symbol)
|
||||
|
||||
if from_vertex is None:
|
||||
raise RuntimeError(f"Symbol {from_symbol} is not in graph")
|
||||
|
||||
if to_vertex is None:
|
||||
raise RuntimeError(f"Symbol {to_symbol} is not in graph")
|
||||
|
||||
del self.graph[from_vertex][to_vertex]
|
||||
del self.reverse[to_vertex][from_vertex]
|
||||
|
||||
to_vertex.count -= 1
|
||||
|
||||
def checkVertex(self, symbol: str) -> bool:
|
||||
return symbol in self.vertices
|
||||
|
||||
def checkEdge(self, from_symbol: str, to_symbol: str) -> bool:
|
||||
if not self.checkVertex(from_symbol):
|
||||
return False
|
||||
|
||||
if not self.checkVertex(to_symbol):
|
||||
return False
|
||||
|
||||
from_vertex = self.vertices.get(from_symbol)
|
||||
to_vertex = self.vertices.get(to_symbol)
|
||||
|
||||
if from_vertex not in self.graph:
|
||||
return False
|
||||
|
||||
return to_vertex in self.graph[from_vertex]
|
||||
|
||||
def checkEdgeWeight(self, from_symbol: str, to_symbol: str, weight: str) -> bool:
|
||||
if not self.checkEdge(from_symbol, to_symbol):
|
||||
return False
|
||||
|
||||
from_vertex = self.vertices.get(from_symbol)
|
||||
to_vertex = self.vertices.get(to_symbol)
|
||||
|
||||
return self.graph[from_vertex][to_vertex] == weight
|
||||
|
||||
def getOutEdges(self, symbol: str):
|
||||
"""Graph the out edges for a vertex."""
|
||||
out_edges = []
|
||||
vertex = self.vertices.get(symbol)
|
||||
if vertex is None:
|
||||
raise RuntimeError(f"Symbol {symbol} is not in graph")
|
||||
|
||||
for (key, value) in self.graph[vertex].items():
|
||||
out_edges.append((key, value))
|
||||
|
||||
return out_edges
|
||||
|
||||
def getInEdges(self, symbol: str):
|
||||
"""Graph the in edges for a vertex."""
|
||||
in_edges = []
|
||||
vertex = self.vertices.get(symbol)
|
||||
if vertex is None:
|
||||
raise RuntimeError(f"Symbol {symbol} is not in graph")
|
||||
|
||||
for (key, value) in self.reverse[vertex].items():
|
||||
in_edges.append((key, value))
|
||||
|
||||
return in_edges
|
||||
|
||||
def getRoots(self, reverse=False) -> list[str]:
|
||||
"""Get the roots of the graph (Vertex with no in edges)."""
|
||||
roots = []
|
||||
for (symbol,_) in self.vertices.items():
|
||||
if not reverse:
|
||||
if len(self.getInEdges(symbol)) == 0:
|
||||
roots.append(symbol)
|
||||
else:
|
||||
# If you want the reverse (vertex with no out edges)
|
||||
if len(self.getOutEdges(symbol)) == 0:
|
||||
roots.append(symbol)
|
||||
|
||||
return roots
|
||||
|
||||
def __cyclesUtil(self, vertex: Vertex) -> None:
|
||||
self.visited.add(vertex)
|
||||
self.curr_search.append(vertex)
|
||||
|
||||
for (out_vertex, _) in self.graph[vertex].items():
|
||||
# If vertex already appeared in current depth search, we have a backedge
|
||||
if out_vertex in self.curr_search:
|
||||
# We save all vertices in the cycle because an edge from the cycle will be removed
|
||||
index = self.curr_search.index(out_vertex)
|
||||
temp_lst = self.curr_search[index:]
|
||||
self.cycles.append(temp_lst)
|
||||
# If vertex visited before in a previous search, we do not need to search from it again
|
||||
elif out_vertex not in self.visited:
|
||||
self.__cyclesUtil(out_vertex)
|
||||
|
||||
self.curr_search.pop()
|
||||
|
||||
def getCycles(self) -> list[list[tuple[str]]]:
|
||||
self.visited = set()
|
||||
self.curr_search = []
|
||||
self.cycles = []
|
||||
lst = []
|
||||
|
||||
for (_, vertex) in self.vertices.items():
|
||||
if vertex not in self.visited:
|
||||
self.__cyclesUtil(vertex)
|
||||
|
||||
return self.cycles
|
||||
|
||||
# Get immediate dominator for each vertex
|
||||
def getDominators(self, post=False):
|
||||
# Create a bitarray for each vertex to showcase which vertices
|
||||
# are dominators
|
||||
num_vertices = len(self.vertices)
|
||||
dominators = {}
|
||||
mapping = []
|
||||
for (_, vertex) in self.vertices.items():
|
||||
mapping.append(vertex)
|
||||
ba = bitarray(num_vertices)
|
||||
ba.setall(True)
|
||||
dominators[vertex] = ba
|
||||
|
||||
# Add the root vertices
|
||||
stack = []
|
||||
roots = self.getRoots(post)
|
||||
for root in roots:
|
||||
stack.append((None, self.vertices[root]))
|
||||
|
||||
while len(stack) != 0:
|
||||
(parent, child) = stack.pop()
|
||||
|
||||
# If no parent, you have no dominators from above
|
||||
# If you have a parent, your dominators are the common dominators
|
||||
# between all parents
|
||||
if parent is None:
|
||||
dominators[child].setall(False)
|
||||
else:
|
||||
dominators[child] &= dominators[parent]
|
||||
|
||||
# You are dominator of yourself
|
||||
index = mapping.index(child)
|
||||
dominators[child][index] = True
|
||||
if not post:
|
||||
for (out_vertex,_) in self.graph[child].items():
|
||||
stack.append((child, out_vertex))
|
||||
else:
|
||||
for (out_vertex,_) in self.reverse[child].items():
|
||||
stack.append((child, out_vertex))
|
||||
|
||||
for (vertex, ba) in dominators.items():
|
||||
# If no Trues in bitarray, you have no immediate dominator
|
||||
# because you are a root vertex. Otherwise, the immediate dominator is the
|
||||
# left-most True bit, excluding yourself
|
||||
index = mapping.index(vertex)
|
||||
ba[index] = False
|
||||
if True not in ba:
|
||||
dominators[vertex] = None
|
||||
else:
|
||||
# Due to reverse, this is the actual index in the initial bitarray
|
||||
dominator_index = ba.index(True)
|
||||
dominators[vertex] = mapping[dominator_index]
|
||||
|
||||
return dominators
|
||||
|
||||
def __printOrderUtil(self, vertex):
|
||||
# If already visited, we do not need to add it to the order again
|
||||
if vertex in self.visited:
|
||||
return
|
||||
|
||||
self.order.append(vertex)
|
||||
self.visited.add(vertex)
|
||||
|
||||
# Get out edges and sort them based on their weight
|
||||
out_edges = self.getOutEdges(vertex.name)
|
||||
out_edges.sort(key = lambda x: x[1], reverse=True)
|
||||
|
||||
# We continue the DFS following the largest weight first
|
||||
for (out, _) in out_edges:
|
||||
self.__printOrderUtil(out)
|
||||
|
||||
def printOrder(self, output):
|
||||
self.order = []
|
||||
self.visited = set()
|
||||
stack = []
|
||||
|
||||
# Create an order using DFS from the root
|
||||
for root in self.getRoots():
|
||||
self.__printOrderUtil(self.vertices[root])
|
||||
|
||||
# Write the order to a file
|
||||
with open(output, "w") as f:
|
||||
for vertex in self.order:
|
||||
f.write(f"{vertex.name}\n")
|
||||
|
||||
def exportGraph(self, output: str) -> None:
|
||||
"""Export graph as a dot file and pdf file."""
|
||||
dot = graphviz.Digraph(comment='Graph Representation of Orderfile')
|
||||
|
||||
for (from_vertex, to_vertices) in self.graph.items():
|
||||
for (to_vertex, weight) in to_vertices.items():
|
||||
dot.edge(str(from_vertex), str(to_vertex), label=str(weight))
|
||||
|
||||
dot.render(filename=output)
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
"""Parses and returns command line arguments."""
|
||||
|
||||
parser = argparse.ArgumentParser(prog="merge_orderfile",
|
||||
description="Merge Order Files")
|
||||
|
||||
parser.add_argument(
|
||||
"--order-files",
|
||||
required=True,
|
||||
help="A collection of order files that need to be merged together."
|
||||
"Format: A file-per-line file with @, a folder with ^, or comma separated values within a quotation."
|
||||
"For example, you can say @file.txt, ^path/to/folder or '1.orderfile,2.orderfile'.")
|
||||
|
||||
parser.add_argument(
|
||||
"--output",
|
||||
default="default.orderfile",
|
||||
help="Provide the output file name for the order file. Default Name: default.orderfile")
|
||||
|
||||
parser.add_argument(
|
||||
"--graph-image",
|
||||
help="Provide the output image name for the graph representation of the order files.")
|
||||
|
||||
return parser.parse_args()
|
||||
|
||||
def removeCycles(graph: Graph) -> None:
|
||||
# Remove cycles created by combining order files
|
||||
for cycleList in graph.getCycles():
|
||||
# Get the sum of in edge weights for all vertices in the cycle
|
||||
# We exclude the cycle edges from the calculation
|
||||
# For example, cycle = [a,b,c] where cycle_edges=[a->b, b->c, c->a]
|
||||
# in_edges(a) = [main, c]
|
||||
# in_edges(b) = [a]
|
||||
# in_edges(c) = [b]
|
||||
#
|
||||
# Excluding cycle edges:
|
||||
# in_edges(a) = [main] = 1
|
||||
# in_edges(b) = [] = 0
|
||||
# in_edges(c) = [] = 0
|
||||
inner_edges = [graph.getInEdges(vertex.name) for vertex in cycleList]
|
||||
inner_weights = []
|
||||
for inner_edge in inner_edges:
|
||||
total = 0
|
||||
for edge in inner_edge:
|
||||
if edge[0] not in cycleList:
|
||||
total += edge[1]
|
||||
inner_weights.append(total)
|
||||
|
||||
# We remove the cycle edge that leads to the highest sum of in-edges for a vertex
|
||||
# because the vertex has other options for ordering.
|
||||
# In the above example, we remove c->a
|
||||
max_inner_weight = max(inner_weights)
|
||||
index = inner_weights.index(max_inner_weight)
|
||||
prev = index - 1
|
||||
if prev < 0:
|
||||
prev = len(inner_weights) - 1
|
||||
to_vertex = cycleList[index]
|
||||
from_vertex = cycleList[prev]
|
||||
|
||||
graph.removeEdgeCompletely(from_vertex.name, to_vertex.name)
|
||||
|
||||
def addSymbolsToGraph(graph: Graph, order: list[str], weight: int = 1) -> None:
|
||||
prev_symbol = None
|
||||
for symbol in order:
|
||||
graph.addVertex(symbol)
|
||||
|
||||
if prev_symbol is not None:
|
||||
for i in range(weight):
|
||||
graph.addEdge(prev_symbol, symbol)
|
||||
|
||||
prev_symbol = symbol
|
||||
|
||||
def createGraph(files: list[str]) -> Graph:
|
||||
graph = Graph()
|
||||
|
||||
# Create graph representation based on combining the order files
|
||||
for (orderfile, weight) in files:
|
||||
with open(orderfile, "r", encoding="utf-8") as f:
|
||||
lst = []
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
lst.append(line)
|
||||
|
||||
addSymbolsToGraph(graph, lst, weight)
|
||||
|
||||
return graph
|
||||
|
||||
def main() -> None:
|
||||
args = parse_args()
|
||||
|
||||
files = orderfile_utils.parse_merge_list(args.order_files)
|
||||
graph = createGraph(files)
|
||||
|
||||
# Assert no cycles after removing them
|
||||
removeCycles(graph)
|
||||
assert len(graph.getCycles()) == 0
|
||||
|
||||
# Create an image of the graph representation
|
||||
if args.graph_image:
|
||||
graph.exportGraph(args.graph_image)
|
||||
|
||||
# Create order file from the graph structure
|
||||
graph.printOrder(args.output)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
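# Illustrative invocation (script and file names are hypothetical):
#   python3 merge_orderfile.py --order-files "a.orderfile,b.orderfile" \
#       --output merged.orderfile --graph-image merged_graph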
|
||||
@ -0,0 +1,103 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright (C) 2023 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
from pathlib import Path
|
||||
import glob
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def parse_set(param : str) -> set[str]:
|
||||
"""Parse symbol set based on a file or comma-separate symbols."""
|
||||
symbol_set = set()
|
||||
if len(param) == 0:
|
||||
return symbol_set
|
||||
|
||||
if param[0] == "@":
|
||||
with open(param[1:], "r") as f:
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
symbol_set.add(line)
|
||||
return symbol_set
|
||||
|
||||
list_symbols = param.split(",")
|
||||
symbol_set.update(list_symbols)
|
||||
return symbol_set
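# Illustrative use (symbol names are hypothetical): parse_set("main,foo,bar")
# returns {"main", "foo", "bar"}, while parse_set("@symbols.txt") reads one
# symbol per line from symbols.txt.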
|
||||
|
||||
def parse_list(param : str) -> list[str]:
|
||||
"""Parse partial order based on a file or comma-separate symbols."""
|
||||
symbol_order = []
|
||||
if len(param) == 0:
|
||||
return symbol_order
|
||||
|
||||
if param[0] == "@":
|
||||
with open(param[1:], "r") as f:
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
symbol_order.append(line)
|
||||
return symbol_order
|
||||
|
||||
symbol_order = param.split(",")
|
||||
return symbol_order
|
||||
|
||||
def parse_merge_list(param : str) -> list[tuple[str,int]]:
|
||||
"""Parse partial order based on a file, folder, or comma-separate symbols."""
|
||||
file_list = []
|
||||
if len(param) == 0:
|
||||
return file_list
|
||||
|
||||
if param[0] == "@":
|
||||
file_dir = Path(param[1:]).resolve().parent
|
||||
with open(param[1:], "r") as f:
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
line_list = line.split(",")
|
||||
# Name, Weight
|
||||
file_list.append((file_dir / line_list[0], int(line_list[1])))
|
||||
return file_list
|
||||
|
||||
if param[0] == "^":
|
||||
file_lst = glob.glob(param[1:]+"/*.orderfile")
|
||||
# Assuming a weight of 1 for all the files. Sorting the files provides
# a deterministic order for the orderfile.
|
||||
file_list = sorted([(orderfile, 1) for orderfile in file_lst])
|
||||
return file_list
|
||||
|
||||
file_lst = param.split(",")
|
||||
file_list = [(orderfile, 1) for orderfile in file_lst]
|
||||
return file_list
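# Illustrative inputs accepted above (file names are hypothetical):
#   "@merge.txt"  -> merge.txt lists "<orderfile>,<weight>" per line, e.g. "a.orderfile,2"
#   "^out/files"  -> every *.orderfile under out/files, each with weight 1
#   "a.orderfile,b.orderfile" -> the listed files, each with weight 1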
|
||||
|
||||
def check_call(cmd, *args, **kwargs):
|
||||
"""subprocess.check_call."""
|
||||
subprocess.check_call(cmd, *args, **kwargs)
|
||||
|
||||
|
||||
def check_output(cmd, *args, **kwargs):
|
||||
"""subprocess.check_output."""
|
||||
return subprocess.run(
|
||||
cmd, *args, **kwargs, check=True, text=True,
|
||||
stdout=subprocess.PIPE).stdout
|
||||
|
||||
def check_error(cmd, *args, **kwargs):
|
||||
"""subprocess.check_error."""
|
||||
return subprocess.run(
|
||||
cmd, *args, **kwargs, check=True, text=True,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout
|
||||
|
||||
def android_build_top():
|
||||
"""Get top directory to find files."""
|
||||
THIS_DIR = os.path.realpath(os.path.dirname(__file__))
|
||||
return os.path.realpath(os.path.join(THIS_DIR, '../../../..'))
|
||||
@ -0,0 +1,128 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright (C) 2023 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# Sample Usage:
|
||||
# $ python3 validate_orderfile.py --order-file ../orderfiles/test/example.orderfile
|
||||
#
|
||||
# Try '-h' for a full list of command line arguments.
|
||||
#
|
||||
# Currently, we check four things in an orderfile:
|
||||
# - A partial order is maintained in the orderfile
|
||||
# - All symbols in allowlist must be present in the orderfile
|
||||
# - No symbol in denylist should be present in the orderfile
|
||||
# - The orderfile has a minimum number of symbols
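# For example (illustrative), passing --partial 'main,foo' asserts that whenever
# both "main" and "foo" appear in the orderfile, "main" comes first.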
|
||||
|
||||
import argparse
|
||||
import orderfile_utils
|
||||
|
||||
def parse_args():
|
||||
"""Parses and returns command line arguments."""
|
||||
parser = argparse.ArgumentParser(prog="validate_orderfile",
|
||||
description="Validates the orderfile is correct and useful based on flag conditions")
|
||||
|
||||
parser.add_argument(
|
||||
"--order-file",
|
||||
required=True,
|
||||
help="Orderfile that needs to be validated")
|
||||
|
||||
parser.add_argument(
|
||||
"--partial",
|
||||
default="",
|
||||
help=f"A partial order of symbols that need to hold in the orderfile."
|
||||
f"Format: A symbol-per-line file with @ or comma separarted values within a quotation."
|
||||
f"For example, you can say @file.txt or 'main,bar,foo'.")
|
||||
|
||||
parser.add_argument(
|
||||
"--allowlist",
|
||||
default="",
|
||||
help=f"Symbols that have to be present in the orderfile."
|
||||
f"Format: A symbol-per-line file with @ or comma separarted values within a quotation."
|
||||
f"For example, you can say @file.txt or 'main,bar,foo'.")
|
||||
|
||||
parser.add_argument(
|
||||
"--denylist",
|
||||
default="",
|
||||
help=f"Symbols that should not be in orderfile. Denylist flag has priority over allowlist."
|
||||
f"Format: A symbol-per-line file with @ or comma separarted values within a quotation."
|
||||
f"For example, you can say @file.txt or 'main,bar,foo'.")
|
||||
|
||||
parser.add_argument(
|
||||
"--min",
|
||||
type=int,
|
||||
default=0,
|
||||
help="Minimum number of entires needed for an orderfile")
|
||||
|
||||
return parser.parse_args()
|
||||
|
||||
def main():
|
||||
args = parse_args()
|
||||
|
||||
allowlist = orderfile_utils.parse_set(args.allowlist)
|
||||
partial = orderfile_utils.parse_list(args.partial)
|
||||
denylist = orderfile_utils.parse_set(args.denylist)
|
||||
|
||||
# Check if there are symbols common to both allowlist and denylist
|
||||
# We give priority to denylist so the symbols in the intersection
|
||||
# will be removed from allowlist
|
||||
inter = allowlist.intersection(denylist)
|
||||
allowlist = allowlist.difference(inter)
|
||||
|
||||
num_entries = 0
|
||||
file_indices = {}
|
||||
file_present = set()
|
||||
|
||||
# Read the orderfile
|
||||
with open(args.order_file, "r") as f:
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
|
||||
# Check if a symbol not allowed is within the orderfile
|
||||
if line in denylist:
|
||||
raise RuntimeError(f"Orderfile should not contain {line}")
|
||||
|
||||
if line in allowlist:
|
||||
file_present.add(line)
|
||||
|
||||
file_indices[line] = num_entries
|
||||
num_entries += 1
|
||||
|
||||
# Check that the orderfile has at least the minimum number of symbols
|
||||
if num_entries < args.min:
|
||||
raise RuntimeError(f"The orderfile has {num_entries} symbols but it "
|
||||
f"needs at least {args.min} symbols")
|
||||
|
||||
# Check that every allowlist symbol is present in the orderfile
|
||||
if len(allowlist) != len(file_present):
|
||||
raise RuntimeError("Some symbols in allow-list are not in the orderfile")
|
||||
|
||||
# Check if partial order passed with flag is maintained within orderfile
|
||||
# The partial order might contain symbols not in the orderfile which we allow
|
||||
# because the order is still maintained.
|
||||
old_index = None
|
||||
curr_symbol = None
|
||||
for symbol in partial:
|
||||
new_index = file_indices.get(symbol)
|
||||
if new_index is not None:
|
||||
if old_index is not None:
|
||||
if new_index < old_index:
|
||||
raise RuntimeError(f"`{curr_symbol}` must be before `{symbol}` in orderfile")
|
||||
old_index = new_index
|
||||
curr_symbol = symbol
|
||||
|
||||
print("Order file is valid")
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@ -0,0 +1,62 @@
|
||||
body { color:#000000; background-color:#ffffff }
|
||||
body { font-family: Helvetica, sans-serif; font-size:9pt }
|
||||
h1 { font-size: 14pt; }
|
||||
h2 { font-size: 12pt; }
|
||||
table { font-size:9pt }
|
||||
table { border-spacing: 0px; border: 1px solid black }
|
||||
th, table thead {
|
||||
background-color:#eee; color:#666666;
|
||||
font-weight: bold; cursor: default;
|
||||
text-align:center;
|
||||
font-weight: bold; font-family: Verdana;
|
||||
white-space:nowrap;
|
||||
}
|
||||
.W { font-size:0px }
|
||||
th, td { padding:5px; padding-left:8px; text-align:left }
|
||||
td.SUMM_DESC { padding-left:12px }
|
||||
td.DESC { white-space:pre }
|
||||
td.Q { text-align:right }
|
||||
td { text-align:left }
|
||||
tbody.scrollContent { overflow:auto }
|
||||
|
||||
table.form_group {
|
||||
background-color: #ccc;
|
||||
border: 1px solid #333;
|
||||
padding: 2px;
|
||||
}
|
||||
|
||||
table.form_inner_group {
|
||||
background-color: #ccc;
|
||||
border: 1px solid #333;
|
||||
padding: 0px;
|
||||
}
|
||||
|
||||
table.form {
|
||||
background-color: #999;
|
||||
border: 1px solid #333;
|
||||
padding: 2px;
|
||||
}
|
||||
|
||||
td.form_label {
|
||||
text-align: right;
|
||||
vertical-align: top;
|
||||
}
|
||||
/* For one-line entries */
|
||||
td.form_clabel {
|
||||
text-align: right;
|
||||
vertical-align: center;
|
||||
}
|
||||
td.form_value {
|
||||
text-align: left;
|
||||
vertical-align: top;
|
||||
}
|
||||
td.form_submit {
|
||||
text-align: right;
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
h1.SubmitFail {
|
||||
color: #f00;
|
||||
}
|
||||
h1.SubmitOk {
|
||||
}
|
||||
@ -0,0 +1,492 @@
|
||||
/*
|
||||
SortTable
|
||||
version 2
|
||||
7th April 2007
|
||||
Stuart Langridge, http://www.kryogenix.org/code/browser/sorttable/
|
||||
|
||||
Instructions:
|
||||
Download this file
|
||||
Add <script src="sorttable.js"></script> to your HTML
|
||||
Add class="sortable" to any table you'd like to make sortable
|
||||
Click on the headers to sort
|
||||
|
||||
Thanks to many, many people for contributions and suggestions.
|
||||
Licenced as X11: http://www.kryogenix.org/code/browser/licence.html
|
||||
This basically means: do what you want with it.
|
||||
*/
|
||||
|
||||
|
||||
var stIsIE = /*@cc_on!@*/false;
|
||||
|
||||
sorttable = {
|
||||
init: function() {
|
||||
// quit if this function has already been called
|
||||
if (arguments.callee.done) return;
|
||||
// flag this function so we don't do the same thing twice
|
||||
arguments.callee.done = true;
|
||||
// kill the timer
|
||||
if (_timer) clearInterval(_timer);
|
||||
|
||||
if (!document.createElement || !document.getElementsByTagName) return;
|
||||
|
||||
sorttable.DATE_RE = /^(\d\d?)[\/\.-](\d\d?)[\/\.-]((\d\d)?\d\d)$/;
|
||||
|
||||
forEach(document.getElementsByTagName('table'), function(table) {
|
||||
if (table.className.search(/\bsortable\b/) != -1) {
|
||||
sorttable.makeSortable(table);
|
||||
}
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
makeSortable: function(table) {
|
||||
if (table.getElementsByTagName('thead').length == 0) {
|
||||
// table doesn't have a tHead. Since it should have, create one and
|
||||
// put the first table row in it.
|
||||
the = document.createElement('thead');
|
||||
the.appendChild(table.rows[0]);
|
||||
table.insertBefore(the,table.firstChild);
|
||||
}
|
||||
// Safari doesn't support table.tHead, sigh
|
||||
if (table.tHead == null) table.tHead = table.getElementsByTagName('thead')[0];
|
||||
|
||||
if (table.tHead.rows.length != 1) return; // can't cope with two header rows
|
||||
|
||||
// Sorttable v1 put rows with a class of "sortbottom" at the bottom (as
|
||||
// "total" rows, for example). This is B&R, since what you're supposed
|
||||
// to do is put them in a tfoot. So, if there are sortbottom rows,
|
||||
// for backward compatibility, move them to tfoot (creating it if needed).
|
||||
sortbottomrows = [];
|
||||
for (var i=0; i<table.rows.length; i++) {
|
||||
if (table.rows[i].className.search(/\bsortbottom\b/) != -1) {
|
||||
sortbottomrows[sortbottomrows.length] = table.rows[i];
|
||||
}
|
||||
}
|
||||
if (sortbottomrows) {
|
||||
if (table.tFoot == null) {
|
||||
// table doesn't have a tfoot. Create one.
|
||||
tfo = document.createElement('tfoot');
|
||||
table.appendChild(tfo);
|
||||
}
|
||||
for (var i=0; i<sortbottomrows.length; i++) {
|
||||
tfo.appendChild(sortbottomrows[i]);
|
||||
}
|
||||
delete sortbottomrows;
|
||||
}
|
||||
|
||||
// work through each column and calculate its type
|
||||
headrow = table.tHead.rows[0].cells;
|
||||
for (var i=0; i<headrow.length; i++) {
|
||||
// manually override the type with a sorttable_type attribute
|
||||
if (!headrow[i].className.match(/\bsorttable_nosort\b/)) { // skip this col
|
||||
mtch = headrow[i].className.match(/\bsorttable_([a-z0-9]+)\b/);
|
||||
if (mtch) { override = mtch[1]; }
|
||||
if (mtch && typeof sorttable["sort_"+override] == 'function') {
|
||||
headrow[i].sorttable_sortfunction = sorttable["sort_"+override];
|
||||
} else {
|
||||
headrow[i].sorttable_sortfunction = sorttable.guessType(table,i);
|
||||
}
|
||||
// make it clickable to sort
|
||||
headrow[i].sorttable_columnindex = i;
|
||||
headrow[i].sorttable_tbody = table.tBodies[0];
|
||||
dean_addEvent(headrow[i],"click", function(e) {
|
||||
|
||||
if (this.className.search(/\bsorttable_sorted\b/) != -1) {
|
||||
// if we're already sorted by this column, just
|
||||
// reverse the table, which is quicker
|
||||
sorttable.reverse(this.sorttable_tbody);
|
||||
this.className = this.className.replace('sorttable_sorted',
|
||||
'sorttable_sorted_reverse');
|
||||
this.removeChild(document.getElementById('sorttable_sortfwdind'));
|
||||
sortrevind = document.createElement('span');
|
||||
sortrevind.id = "sorttable_sortrevind";
|
||||
sortrevind.innerHTML = stIsIE ? ' <font face="webdings">5</font>' : ' ▴';
|
||||
this.appendChild(sortrevind);
|
||||
return;
|
||||
}
|
||||
if (this.className.search(/\bsorttable_sorted_reverse\b/) != -1) {
|
||||
// if we're already sorted by this column in reverse, just
|
||||
// re-reverse the table, which is quicker
|
||||
sorttable.reverse(this.sorttable_tbody);
|
||||
this.className = this.className.replace('sorttable_sorted_reverse',
|
||||
'sorttable_sorted');
|
||||
this.removeChild(document.getElementById('sorttable_sortrevind'));
|
||||
sortfwdind = document.createElement('span');
|
||||
sortfwdind.id = "sorttable_sortfwdind";
|
||||
sortfwdind.innerHTML = stIsIE ? ' <font face="webdings">6</font>' : ' ▾';
|
||||
this.appendChild(sortfwdind);
|
||||
return;
|
||||
}
|
||||
|
||||
// remove sorttable_sorted classes
|
||||
theadrow = this.parentNode;
|
||||
forEach(theadrow.childNodes, function(cell) {
|
||||
if (cell.nodeType == 1) { // an element
|
||||
cell.className = cell.className.replace('sorttable_sorted_reverse','');
|
||||
cell.className = cell.className.replace('sorttable_sorted','');
|
||||
}
|
||||
});
|
||||
sortfwdind = document.getElementById('sorttable_sortfwdind');
|
||||
if (sortfwdind) { sortfwdind.parentNode.removeChild(sortfwdind); }
|
||||
sortrevind = document.getElementById('sorttable_sortrevind');
|
||||
if (sortrevind) { sortrevind.parentNode.removeChild(sortrevind); }
|
||||
|
||||
this.className += ' sorttable_sorted';
|
||||
sortfwdind = document.createElement('span');
|
||||
sortfwdind.id = "sorttable_sortfwdind";
|
||||
sortfwdind.innerHTML = stIsIE ? ' <font face="webdings">6</font>' : ' ▾';
|
||||
this.appendChild(sortfwdind);
|
||||
|
||||
// build an array to sort. This is a Schwartzian transform thing,
|
||||
// i.e., we "decorate" each row with the actual sort key,
|
||||
// sort based on the sort keys, and then put the rows back in order
|
||||
// which is a lot faster because you only do getInnerText once per row
|
||||
row_array = [];
|
||||
col = this.sorttable_columnindex;
|
||||
rows = this.sorttable_tbody.rows;
|
||||
for (var j=0; j<rows.length; j++) {
|
||||
row_array[row_array.length] = [sorttable.getInnerText(rows[j].cells[col]), rows[j]];
|
||||
}
|
||||
/* If you want a stable sort, uncomment the following line */
|
||||
sorttable.shaker_sort(row_array, this.sorttable_sortfunction);
|
||||
/* and comment out this one */
|
||||
//row_array.sort(this.sorttable_sortfunction);
|
||||
|
||||
tb = this.sorttable_tbody;
|
||||
for (var j=0; j<row_array.length; j++) {
|
||||
tb.appendChild(row_array[j][1]);
|
||||
}
|
||||
|
||||
delete row_array;
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
guessType: function(table, column) {
|
||||
// guess the type of a column based on its first non-blank row
|
||||
sortfn = sorttable.sort_alpha;
|
||||
for (var i=0; i<table.tBodies[0].rows.length; i++) {
|
||||
text = sorttable.getInnerText(table.tBodies[0].rows[i].cells[column]);
|
||||
if (text != '') {
|
||||
if (text.match(/^-?[£$¤]?[\d,.]+%?$/)) {
|
||||
return sorttable.sort_numeric;
|
||||
}
|
||||
// check for a date: dd/mm/yyyy or dd/mm/yy
|
||||
// can have / or . or - as separator
|
||||
// can be mm/dd as well
|
||||
possdate = text.match(sorttable.DATE_RE)
|
||||
if (possdate) {
|
||||
// looks like a date
|
||||
first = parseInt(possdate[1]);
|
||||
second = parseInt(possdate[2]);
|
||||
if (first > 12) {
|
||||
// definitely dd/mm
|
||||
return sorttable.sort_ddmm;
|
||||
} else if (second > 12) {
|
||||
return sorttable.sort_mmdd;
|
||||
} else {
|
||||
// looks like a date, but we can't tell which, so assume
|
||||
// that it's dd/mm (English imperialism!) and keep looking
|
||||
sortfn = sorttable.sort_ddmm;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return sortfn;
|
||||
},
|
||||
|
||||
getInnerText: function(node) {
|
||||
// gets the text we want to use for sorting for a cell.
|
||||
// strips leading and trailing whitespace.
|
||||
// this is *not* a generic getInnerText function; it's special to sorttable.
|
||||
// for example, you can override the cell text with a customkey attribute.
|
||||
// it also gets .value for <input> fields.
|
||||
|
||||
hasInputs = (typeof node.getElementsByTagName == 'function') &&
|
||||
node.getElementsByTagName('input').length;
|
||||
|
||||
if (node.getAttribute("sorttable_customkey") != null) {
|
||||
return node.getAttribute("sorttable_customkey");
|
||||
}
|
||||
else if (typeof node.textContent != 'undefined' && !hasInputs) {
|
||||
return node.textContent.replace(/^\s+|\s+$/g, '');
|
||||
}
|
||||
else if (typeof node.innerText != 'undefined' && !hasInputs) {
|
||||
return node.innerText.replace(/^\s+|\s+$/g, '');
|
||||
}
|
||||
else if (typeof node.text != 'undefined' && !hasInputs) {
|
||||
return node.text.replace(/^\s+|\s+$/g, '');
|
||||
}
|
||||
else {
|
||||
switch (node.nodeType) {
|
||||
case 3:
|
||||
if (node.nodeName.toLowerCase() == 'input') {
|
||||
return node.value.replace(/^\s+|\s+$/g, '');
|
||||
}
|
||||
case 4:
|
||||
return node.nodeValue.replace(/^\s+|\s+$/g, '');
|
||||
break;
|
||||
case 1:
|
||||
case 11:
|
||||
var innerText = '';
|
||||
for (var i = 0; i < node.childNodes.length; i++) {
|
||||
innerText += sorttable.getInnerText(node.childNodes[i]);
|
||||
}
|
||||
return innerText.replace(/^\s+|\s+$/g, '');
|
||||
break;
|
||||
default:
|
||||
return '';
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
reverse: function(tbody) {
|
||||
// reverse the rows in a tbody
|
||||
newrows = [];
|
||||
for (var i=0; i<tbody.rows.length; i++) {
|
||||
newrows[newrows.length] = tbody.rows[i];
|
||||
}
|
||||
for (var i=newrows.length-1; i>=0; i--) {
|
||||
tbody.appendChild(newrows[i]);
|
||||
}
|
||||
delete newrows;
|
||||
},
|
||||
|
||||
/* sort functions
|
||||
each sort function takes two parameters, a and b
|
||||
you are comparing a[0] and b[0] */
|
||||
sort_numeric: function(a,b) {
|
||||
aa = parseFloat(a[0].replace(/[^0-9.-]/g,''));
|
||||
if (isNaN(aa)) aa = 0;
|
||||
bb = parseFloat(b[0].replace(/[^0-9.-]/g,''));
|
||||
if (isNaN(bb)) bb = 0;
|
||||
return aa-bb;
|
||||
},
|
||||
sort_alpha: function(a,b) {
|
||||
if (a[0]==b[0]) return 0;
|
||||
if (a[0]<b[0]) return -1;
|
||||
return 1;
|
||||
},
|
||||
sort_ddmm: function(a,b) {
|
||||
mtch = a[0].match(sorttable.DATE_RE);
|
||||
y = mtch[3]; m = mtch[2]; d = mtch[1];
|
||||
if (m.length == 1) m = '0'+m;
|
||||
if (d.length == 1) d = '0'+d;
|
||||
dt1 = y+m+d;
|
||||
mtch = b[0].match(sorttable.DATE_RE);
|
||||
y = mtch[3]; m = mtch[2]; d = mtch[1];
|
||||
if (m.length == 1) m = '0'+m;
|
||||
if (d.length == 1) d = '0'+d;
|
||||
dt2 = y+m+d;
|
||||
if (dt1==dt2) return 0;
|
||||
if (dt1<dt2) return -1;
|
||||
return 1;
|
||||
},
|
||||
sort_mmdd: function(a,b) {
|
||||
mtch = a[0].match(sorttable.DATE_RE);
|
||||
y = mtch[3]; d = mtch[2]; m = mtch[1];
|
||||
if (m.length == 1) m = '0'+m;
|
||||
if (d.length == 1) d = '0'+d;
|
||||
dt1 = y+m+d;
|
||||
mtch = b[0].match(sorttable.DATE_RE);
|
||||
y = mtch[3]; d = mtch[2]; m = mtch[1];
|
||||
if (m.length == 1) m = '0'+m;
|
||||
if (d.length == 1) d = '0'+d;
|
||||
dt2 = y+m+d;
|
||||
if (dt1==dt2) return 0;
|
||||
if (dt1<dt2) return -1;
|
||||
return 1;
|
||||
},
|
||||
|
||||
shaker_sort: function(list, comp_func) {
|
||||
// A stable sort function to allow multi-level sorting of data
|
||||
// see: http://en.wikipedia.org/wiki/Cocktail_sort
|
||||
// thanks to Joseph Nahmias
|
||||
var b = 0;
|
||||
var t = list.length - 1;
|
||||
var swap = true;
|
||||
|
||||
while(swap) {
|
||||
swap = false;
|
||||
for(var i = b; i < t; ++i) {
|
||||
if ( comp_func(list[i], list[i+1]) > 0 ) {
|
||||
var q = list[i]; list[i] = list[i+1]; list[i+1] = q;
|
||||
swap = true;
|
||||
}
|
||||
} // for
|
||||
t--;
|
||||
|
||||
if (!swap) break;
|
||||
|
||||
for(var i = t; i > b; --i) {
|
||||
if ( comp_func(list[i], list[i-1]) < 0 ) {
|
||||
var q = list[i]; list[i] = list[i-1]; list[i-1] = q;
|
||||
swap = true;
|
||||
}
|
||||
} // for
|
||||
b++;
|
||||
|
||||
} // while(swap)
|
||||
}
|
||||
}
|
||||
|
||||
/* ******************************************************************
|
||||
Supporting functions: bundled here to avoid depending on a library
|
||||
****************************************************************** */
|
||||
|
||||
// Dean Edwards/Matthias Miller/John Resig
|
||||
|
||||
/* for Mozilla/Opera9 */
|
||||
if (document.addEventListener) {
|
||||
document.addEventListener("DOMContentLoaded", sorttable.init, false);
|
||||
}
|
||||
|
||||
/* for Internet Explorer */
|
||||
/*@cc_on @*/
|
||||
/*@if (@_win32)
|
||||
document.write("<script id=__ie_onload defer src=javascript:void(0)><\/script>");
|
||||
var script = document.getElementById("__ie_onload");
|
||||
script.onreadystatechange = function() {
|
||||
if (this.readyState == "complete") {
|
||||
sorttable.init(); // call the onload handler
|
||||
}
|
||||
};
|
||||
/*@end @*/
|
||||
|
||||
/* for Safari */
|
||||
if (/WebKit/i.test(navigator.userAgent)) { // sniff
|
||||
var _timer = setInterval(function() {
|
||||
if (/loaded|complete/.test(document.readyState)) {
|
||||
sorttable.init(); // call the onload handler
|
||||
}
|
||||
}, 10);
|
||||
}
|
||||
|
||||
/* for other browsers */
|
||||
window.onload = sorttable.init;
|
||||
|
||||
// written by Dean Edwards, 2005
|
||||
// with input from Tino Zijdel, Matthias Miller, Diego Perini
|
||||
|
||||
// http://dean.edwards.name/weblog/2005/10/add-event/
|
||||
|
||||
function dean_addEvent(element, type, handler) {
|
||||
if (element.addEventListener) {
|
||||
element.addEventListener(type, handler, false);
|
||||
} else {
|
||||
// assign each event handler a unique ID
|
||||
if (!handler.$$guid) handler.$$guid = dean_addEvent.guid++;
|
||||
// create a hash table of event types for the element
|
||||
if (!element.events) element.events = {};
|
||||
// create a hash table of event handlers for each element/event pair
|
||||
var handlers = element.events[type];
|
||||
if (!handlers) {
|
||||
handlers = element.events[type] = {};
|
||||
// store the existing event handler (if there is one)
|
||||
if (element["on" + type]) {
|
||||
handlers[0] = element["on" + type];
|
||||
}
|
||||
}
|
||||
// store the event handler in the hash table
|
||||
handlers[handler.$$guid] = handler;
|
||||
// assign a global event handler to do all the work
|
||||
element["on" + type] = handleEvent;
|
||||
}
|
||||
};
|
||||
// a counter used to create unique IDs
|
||||
dean_addEvent.guid = 1;
|
||||
|
||||
function removeEvent(element, type, handler) {
|
||||
if (element.removeEventListener) {
|
||||
element.removeEventListener(type, handler, false);
|
||||
} else {
|
||||
// delete the event handler from the hash table
|
||||
if (element.events && element.events[type]) {
|
||||
delete element.events[type][handler.$$guid];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function handleEvent(event) {
|
||||
var returnValue = true;
|
||||
// grab the event object (IE uses a global event object)
|
||||
event = event || fixEvent(((this.ownerDocument || this.document || this).parentWindow || window).event);
|
||||
// get a reference to the hash table of event handlers
|
||||
var handlers = this.events[event.type];
|
||||
// execute each event handler
|
||||
for (var i in handlers) {
|
||||
this.$$handleEvent = handlers[i];
|
||||
if (this.$$handleEvent(event) === false) {
|
||||
returnValue = false;
|
||||
}
|
||||
}
|
||||
return returnValue;
|
||||
};
|
||||
|
||||
function fixEvent(event) {
|
||||
// add W3C standard event methods
|
||||
event.preventDefault = fixEvent.preventDefault;
|
||||
event.stopPropagation = fixEvent.stopPropagation;
|
||||
return event;
|
||||
};
|
||||
fixEvent.preventDefault = function() {
|
||||
this.returnValue = false;
|
||||
};
|
||||
fixEvent.stopPropagation = function() {
|
||||
this.cancelBubble = true;
|
||||
}
|
||||
|
||||
// Dean's forEach: http://dean.edwards.name/base/forEach.js
|
||||
/*
|
||||
forEach, version 1.0
|
||||
Copyright 2006, Dean Edwards
|
||||
License: http://www.opensource.org/licenses/mit-license.php
|
||||
*/
|
||||
|
||||
// array-like enumeration
|
||||
if (!Array.forEach) { // mozilla already supports this
|
||||
Array.forEach = function(array, block, context) {
|
||||
for (var i = 0; i < array.length; i++) {
|
||||
block.call(context, array[i], i, array);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// generic enumeration
|
||||
Function.prototype.forEach = function(object, block, context) {
|
||||
for (var key in object) {
|
||||
if (typeof this.prototype[key] == "undefined") {
|
||||
block.call(context, object[key], key, object);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// character enumeration
|
||||
String.forEach = function(string, block, context) {
|
||||
Array.forEach(string.split(""), function(chr, index) {
|
||||
block.call(context, chr, index, string);
|
||||
});
|
||||
};
|
||||
|
||||
// globally resolve forEach enumeration
|
||||
var forEach = function(object, block, context) {
|
||||
if (object) {
|
||||
var resolve = Object; // default
|
||||
if (object instanceof Function) {
|
||||
// functions have a "length" property
|
||||
resolve = Function;
|
||||
} else if (object.forEach instanceof Function) {
|
||||
// the object implements a custom forEach method so use that
|
||||
object.forEach(block, context);
|
||||
return;
|
||||
} else if (typeof object == "string") {
|
||||
// the object is a string
|
||||
resolve = String;
|
||||
} else if (typeof object.length == "number") {
|
||||
// the object is array-like
|
||||
resolve = Array;
|
||||
}
|
||||
resolve.forEach(object, block, context);
|
||||
}
|
||||
};
|
||||
@ -0,0 +1,226 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Methods for reporting bugs."""
|
||||
|
||||
import subprocess, sys, os
|
||||
|
||||
__all__ = ["ReportFailure", "BugReport", "getReporters"]
|
||||
|
||||
#
|
||||
|
||||
|
||||
class ReportFailure(Exception):
|
||||
"""Generic exception for failures in bug reporting."""
|
||||
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
|
||||
# Collect information about a bug.
|
||||
|
||||
|
||||
class BugReport(object):
|
||||
def __init__(self, title, description, files):
|
||||
self.title = title
|
||||
self.description = description
|
||||
self.files = files
|
||||
|
||||
|
||||
# Reporter interfaces.
|
||||
|
||||
import os
|
||||
|
||||
import email, mimetypes, smtplib
|
||||
from email import encoders
|
||||
from email.message import Message
|
||||
from email.mime.base import MIMEBase
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
|
||||
# ===------------------------------------------------------------------------===#
|
||||
# ReporterParameter
|
||||
# ===------------------------------------------------------------------------===#
|
||||
|
||||
|
||||
class ReporterParameter(object):
|
||||
def __init__(self, n):
|
||||
self.name = n
|
||||
|
||||
def getName(self):
|
||||
return self.name
|
||||
|
||||
def getValue(self, r, bugtype, getConfigOption):
|
||||
return getConfigOption(r.getName(), self.getName())
|
||||
|
||||
def saveConfigValue(self):
|
||||
return True
|
||||
|
||||
|
||||
class TextParameter(ReporterParameter):
|
||||
def getHTML(self, r, bugtype, getConfigOption):
|
||||
return """\
|
||||
<tr>
|
||||
<td class="form_clabel">%s:</td>
|
||||
<td class="form_value"><input type="text" name="%s_%s" value="%s"></td>
|
||||
</tr>""" % (
|
||||
self.getName(),
|
||||
r.getName(),
|
||||
self.getName(),
|
||||
self.getValue(r, bugtype, getConfigOption),
|
||||
)
|
||||
|
||||
|
||||
class SelectionParameter(ReporterParameter):
|
||||
def __init__(self, n, values):
|
||||
ReporterParameter.__init__(self, n)
|
||||
self.values = values
|
||||
|
||||
def getHTML(self, r, bugtype, getConfigOption):
|
||||
default = self.getValue(r, bugtype, getConfigOption)
|
||||
return """\
|
||||
<tr>
|
||||
<td class="form_clabel">%s:</td><td class="form_value"><select name="%s_%s">
|
||||
%s
|
||||
</select></td>""" % (
|
||||
self.getName(),
|
||||
r.getName(),
|
||||
self.getName(),
|
||||
"\n".join(
|
||||
[
|
||||
"""\
|
||||
<option value="%s"%s>%s</option>"""
|
||||
% (o[0], o[0] == default and ' selected="selected"' or "", o[1])
|
||||
for o in self.values
|
||||
]
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
# ===------------------------------------------------------------------------===#
|
||||
# Reporters
|
||||
# ===------------------------------------------------------------------------===#
|
||||
|
||||
|
||||
class EmailReporter(object):
|
||||
def getName(self):
|
||||
return "Email"
|
||||
|
||||
def getParameters(self):
|
||||
return [TextParameter(x) for x in ["To", "From", "SMTP Server", "SMTP Port"]]
|
||||
|
||||
# Lifted from python email module examples.
|
||||
def attachFile(self, outer, path):
|
||||
# Guess the content type based on the file's extension. Encoding
|
||||
# will be ignored, although we should check for simple things like
|
||||
# gzip'd or compressed files.
|
||||
ctype, encoding = mimetypes.guess_type(path)
|
||||
if ctype is None or encoding is not None:
|
||||
# No guess could be made, or the file is encoded (compressed), so
|
||||
# use a generic bag-of-bits type.
|
||||
ctype = "application/octet-stream"
|
||||
maintype, subtype = ctype.split("/", 1)
|
||||
if maintype == "text":
|
||||
fp = open(path)
|
||||
# Note: we should handle calculating the charset
|
||||
msg = MIMEText(fp.read(), _subtype=subtype)
|
||||
fp.close()
|
||||
else:
|
||||
fp = open(path, "rb")
|
||||
msg = MIMEBase(maintype, subtype)
|
||||
msg.set_payload(fp.read())
|
||||
fp.close()
|
||||
# Encode the payload using Base64
|
||||
encoders.encode_base64(msg)
|
||||
# Set the filename parameter
|
||||
msg.add_header(
|
||||
"Content-Disposition", "attachment", filename=os.path.basename(path)
|
||||
)
|
||||
outer.attach(msg)
|
||||
|
||||
def fileReport(self, report, parameters):
|
||||
mainMsg = """\
|
||||
BUG REPORT
|
||||
---
|
||||
Title: %s
|
||||
Description: %s
|
||||
""" % (
|
||||
report.title,
|
||||
report.description,
|
||||
)
|
||||
|
||||
if not parameters.get("To"):
|
||||
raise ReportFailure('No "To" address specified.')
|
||||
if not parameters.get("From"):
|
||||
raise ReportFailure('No "From" address specified.')
|
||||
|
||||
msg = MIMEMultipart()
|
||||
msg["Subject"] = "BUG REPORT: %s" % (report.title)
|
||||
# FIXME: Get config parameters
|
||||
msg["To"] = parameters.get("To")
|
||||
msg["From"] = parameters.get("From")
|
||||
msg.preamble = mainMsg
|
||||
|
||||
msg.attach(MIMEText(mainMsg, _subtype="plain"))
|
||||
for file in report.files:
|
||||
self.attachFile(msg, file)
|
||||
|
||||
try:
|
||||
s = smtplib.SMTP(
|
||||
host=parameters.get("SMTP Server"), port=parameters.get("SMTP Port")
|
||||
)
|
||||
s.sendmail(msg["From"], msg["To"], msg.as_string())
|
||||
s.close()
|
||||
except:
|
||||
raise ReportFailure("Unable to send message via SMTP.")
|
||||
|
||||
return "Message sent!"
|
||||
|
||||
|
||||
class BugzillaReporter(object):
|
||||
def getName(self):
|
||||
return "Bugzilla"
|
||||
|
||||
def getParameters(self):
|
||||
return [TextParameter(x) for x in ["URL", "Product"]]
|
||||
|
||||
def fileReport(self, report, parameters):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class RadarClassificationParameter(SelectionParameter):
|
||||
def __init__(self):
|
||||
SelectionParameter.__init__(
|
||||
self,
|
||||
"Classification",
|
||||
[
|
||||
["1", "Security"],
|
||||
["2", "Crash/Hang/Data Loss"],
|
||||
["3", "Performance"],
|
||||
["4", "UI/Usability"],
|
||||
["6", "Serious Bug"],
|
||||
["7", "Other"],
|
||||
],
|
||||
)
|
||||
|
||||
def saveConfigValue(self):
|
||||
return False
|
||||
|
||||
def getValue(self, r, bugtype, getConfigOption):
|
||||
if bugtype.find("leak") != -1:
|
||||
return "3"
|
||||
elif bugtype.find("dereference") != -1:
|
||||
return "2"
|
||||
elif bugtype.find("missing ivar release") != -1:
|
||||
return "3"
|
||||
else:
|
||||
return "7"
|
||||
|
||||
|
||||
###
|
||||
|
||||
|
||||
def getReporters():
|
||||
reporters = []
|
||||
reporters.append(EmailReporter())
|
||||
return reporters
|
||||
@ -0,0 +1,859 @@
|
||||
from __future__ import print_function
|
||||
|
||||
try:
|
||||
from http.server import HTTPServer, SimpleHTTPRequestHandler
|
||||
except ImportError:
|
||||
from BaseHTTPServer import HTTPServer
|
||||
from SimpleHTTPServer import SimpleHTTPRequestHandler
|
||||
import os
|
||||
import sys
|
||||
|
||||
try:
|
||||
from urlparse import urlparse
|
||||
from urllib import unquote
|
||||
except ImportError:
|
||||
from urllib.parse import urlparse, unquote
|
||||
|
||||
import posixpath
|
||||
|
||||
if sys.version_info.major >= 3:
|
||||
from io import StringIO, BytesIO
|
||||
else:
|
||||
from io import BytesIO, BytesIO as StringIO
|
||||
|
||||
import re
|
||||
import shutil
|
||||
import threading
|
||||
import time
|
||||
import socket
|
||||
import itertools
|
||||
|
||||
import Reporter
|
||||
|
||||
try:
|
||||
import configparser
|
||||
except ImportError:
|
||||
import ConfigParser as configparser
|
||||
|
||||
###
|
||||
# Various patterns matched or replaced by server.
|
||||
|
||||
kReportFileRE = re.compile("(.*/)?report-(.*)\\.html")
|
||||
|
||||
kBugKeyValueRE = re.compile("<!-- BUG([^ ]*) (.*) -->")
|
||||
|
||||
# <!-- REPORTPROBLEM file="crashes/clang_crash_ndSGF9.mi" stderr="crashes/clang_crash_ndSGF9.mi.stderr.txt" info="crashes/clang_crash_ndSGF9.mi.info" -->
|
||||
|
||||
kReportCrashEntryRE = re.compile("<!-- REPORTPROBLEM (.*?)-->")
|
||||
kReportCrashEntryKeyValueRE = re.compile(' ?([^=]+)="(.*?)"')
|
||||
|
||||
kReportReplacements = []
|
||||
|
||||
# Add custom javascript.
|
||||
kReportReplacements.append(
|
||||
(
|
||||
re.compile("<!-- SUMMARYENDHEAD -->"),
|
||||
"""\
|
||||
<script language="javascript" type="text/javascript">
|
||||
function load(url) {
|
||||
if (window.XMLHttpRequest) {
|
||||
req = new XMLHttpRequest();
|
||||
} else if (window.ActiveXObject) {
|
||||
req = new ActiveXObject("Microsoft.XMLHTTP");
|
||||
}
|
||||
if (req != undefined) {
|
||||
req.open("GET", url, true);
|
||||
req.send("");
|
||||
}
|
||||
}
|
||||
</script>""",
|
||||
)
|
||||
)
|
||||
|
||||
# Insert additional columns.
|
||||
kReportReplacements.append((re.compile("<!-- REPORTBUGCOL -->"), "<td></td><td></td>"))
|
||||
|
||||
# Insert report bug and open file links.
|
||||
kReportReplacements.append(
|
||||
(
|
||||
re.compile('<!-- REPORTBUG id="report-(.*)\\.html" -->'),
|
||||
(
|
||||
'<td class="Button"><a href="report/\\1">Report Bug</a></td>'
|
||||
+ '<td class="Button"><a href="javascript:load(\'open/\\1\')">Open File</a></td>'
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
kReportReplacements.append(
|
||||
(
|
||||
re.compile("<!-- REPORTHEADER -->"),
|
||||
'<h3><a href="/">Summary</a> > Report %(report)s</h3>',
|
||||
)
|
||||
)
|
||||
|
||||
kReportReplacements.append(
|
||||
(
|
||||
re.compile("<!-- REPORTSUMMARYEXTRA -->"),
|
||||
'<td class="Button"><a href="report/%(report)s">Report Bug</a></td>',
|
||||
)
|
||||
)
|
||||
|
||||
# Insert report crashes link.
|
||||
|
||||
# Disabled for the time being until we decide exactly when this should
|
||||
# be enabled. Also the radar reporter needs to be fixed to report
|
||||
# multiple files.
|
||||
|
||||
# kReportReplacements.append((re.compile('<!-- REPORTCRASHES -->'),
|
||||
# '<br>These files will automatically be attached to ' +
|
||||
# 'reports filed here: <a href="report_crashes">Report Crashes</a>.'))
|
||||
|
||||
###
|
||||
# Other simple parameters
|
||||
|
||||
kShare = posixpath.join(posixpath.dirname(__file__), "../share/scan-view")
|
||||
kConfigPath = os.path.expanduser("~/.scanview.cfg")
|
||||
|
||||
###
|
||||
|
||||
__version__ = "0.1"
|
||||
|
||||
__all__ = ["create_server"]
|
||||
|
||||
|
||||
class ReporterThread(threading.Thread):
|
||||
def __init__(self, report, reporter, parameters, server):
|
||||
threading.Thread.__init__(self)
|
||||
self.report = report
|
||||
self.server = server
|
||||
self.reporter = reporter
|
||||
self.parameters = parameters
|
||||
self.success = False
|
||||
self.status = None
|
||||
|
||||
def run(self):
|
||||
result = None
|
||||
try:
|
||||
if self.server.options.debug:
|
||||
print("%s: SERVER: submitting bug." % (sys.argv[0],), file=sys.stderr)
|
||||
self.status = self.reporter.fileReport(self.report, self.parameters)
|
||||
self.success = True
|
||||
time.sleep(3)
|
||||
if self.server.options.debug:
|
||||
print(
|
||||
"%s: SERVER: submission complete." % (sys.argv[0],), file=sys.stderr
|
||||
)
|
||||
except Reporter.ReportFailure as e:
|
||||
self.status = e.value
|
||||
except Exception as e:
|
||||
s = StringIO()
|
||||
import traceback
|
||||
|
||||
print("<b>Unhandled Exception</b><br><pre>", file=s)
|
||||
traceback.print_exc(file=s)
|
||||
print("</pre>", file=s)
|
||||
self.status = s.getvalue()
|
||||
|
||||
|
||||
class ScanViewServer(HTTPServer):
|
||||
def __init__(self, address, handler, root, reporters, options):
|
||||
HTTPServer.__init__(self, address, handler)
|
||||
self.root = root
|
||||
self.reporters = reporters
|
||||
self.options = options
|
||||
self.halted = False
|
||||
self.config = None
|
||||
self.load_config()
|
||||
|
||||
def load_config(self):
|
||||
self.config = configparser.RawConfigParser()
|
||||
|
||||
# Add defaults
|
||||
self.config.add_section("ScanView")
|
||||
for r in self.reporters:
|
||||
self.config.add_section(r.getName())
|
||||
for p in r.getParameters():
|
||||
if p.saveConfigValue():
|
||||
self.config.set(r.getName(), p.getName(), "")
|
||||
|
||||
# Ignore parse errors
|
||||
try:
|
||||
self.config.read([kConfigPath])
|
||||
except:
|
||||
pass
|
||||
|
||||
# Save on exit
|
||||
import atexit
|
||||
|
||||
atexit.register(lambda: self.save_config())
|
||||
|
||||
def save_config(self):
|
||||
# Ignore errors (only called on exit).
|
||||
try:
|
||||
f = open(kConfigPath, "w")
|
||||
self.config.write(f)
|
||||
f.close()
|
||||
except:
|
||||
pass
|
||||
|
||||
def halt(self):
|
||||
self.halted = True
|
||||
if self.options.debug:
|
||||
print("%s: SERVER: halting." % (sys.argv[0],), file=sys.stderr)
|
||||
|
||||
def serve_forever(self):
|
||||
while not self.halted:
|
||||
if self.options.debug > 1:
|
||||
print("%s: SERVER: waiting..." % (sys.argv[0],), file=sys.stderr)
|
||||
try:
|
||||
self.handle_request()
|
||||
except OSError as e:
|
||||
print("OSError", e.errno)
|
||||
|
||||
def finish_request(self, request, client_address):
|
||||
if self.options.autoReload:
|
||||
import ScanView
|
||||
|
||||
self.RequestHandlerClass = reload(ScanView).ScanViewRequestHandler
|
||||
HTTPServer.finish_request(self, request, client_address)
|
||||
|
||||
def handle_error(self, request, client_address):
|
||||
# Ignore socket errors
|
||||
info = sys.exc_info()
|
||||
if info and isinstance(info[1], socket.error):
|
||||
if self.options.debug > 1:
|
||||
print(
|
||||
"%s: SERVER: ignored socket error." % (sys.argv[0],),
|
||||
file=sys.stderr,
|
||||
)
|
||||
return
|
||||
HTTPServer.handle_error(self, request, client_address)
|
||||
|
||||
|
||||
# Borrowed from Quixote, with simplifications.
|
||||
def parse_query(qs, fields=None):
|
||||
if fields is None:
|
||||
fields = {}
|
||||
for chunk in (_f for _f in qs.split("&") if _f):
|
||||
if "=" not in chunk:
|
||||
name = chunk
|
||||
value = ""
|
||||
else:
|
||||
name, value = chunk.split("=", 1)
|
||||
name = unquote(name.replace("+", " "))
|
||||
value = unquote(value.replace("+", " "))
|
||||
item = fields.get(name)
|
||||
if item is None:
|
||||
fields[name] = [value]
|
||||
else:
|
||||
item.append(value)
|
||||
return fields
|
||||
|
||||
|
||||
class ScanViewRequestHandler(SimpleHTTPRequestHandler):
|
||||
server_version = "ScanViewServer/" + __version__
|
||||
dynamic_mtime = time.time()
|
||||
|
||||
def do_HEAD(self):
|
||||
try:
|
||||
SimpleHTTPRequestHandler.do_HEAD(self)
|
||||
except Exception as e:
|
||||
self.handle_exception(e)
|
||||
|
||||
def do_GET(self):
|
||||
try:
|
||||
SimpleHTTPRequestHandler.do_GET(self)
|
||||
except Exception as e:
|
||||
self.handle_exception(e)
|
||||
|
||||
def do_POST(self):
|
||||
"""Serve a POST request."""
|
||||
try:
|
||||
length = self.headers.get("content-length") or "0"
|
||||
try:
|
||||
length = int(length)
|
||||
except:
|
||||
length = 0
|
||||
content = self.rfile.read(length)
|
||||
fields = parse_query(content)
|
||||
f = self.send_head(fields)
|
||||
if f:
|
||||
self.copyfile(f, self.wfile)
|
||||
f.close()
|
||||
except Exception as e:
|
||||
self.handle_exception(e)
|
||||
|
||||
def log_message(self, format, *args):
|
||||
if self.server.options.debug:
|
||||
sys.stderr.write(
|
||||
"%s: SERVER: %s - - [%s] %s\n"
|
||||
% (
|
||||
sys.argv[0],
|
||||
self.address_string(),
|
||||
self.log_date_time_string(),
|
||||
format % args,
|
||||
)
|
||||
)
|
||||
|
||||
def load_report(self, report):
|
||||
path = os.path.join(self.server.root, "report-%s.html" % report)
|
||||
data = open(path).read()
|
||||
keys = {}
|
||||
for item in kBugKeyValueRE.finditer(data):
|
||||
k, v = item.groups()
|
||||
keys[k] = v
|
||||
return keys
|
||||
|
||||
def load_crashes(self):
|
||||
path = posixpath.join(self.server.root, "index.html")
|
||||
data = open(path).read()
|
||||
problems = []
|
||||
for item in kReportCrashEntryRE.finditer(data):
|
||||
fieldData = item.group(1)
|
||||
fields = dict(
|
||||
[i.groups() for i in kReportCrashEntryKeyValueRE.finditer(fieldData)]
|
||||
)
|
||||
problems.append(fields)
|
||||
return problems
|
||||
|
||||
def handle_exception(self, exc):
|
||||
import traceback
|
||||
|
||||
s = StringIO()
|
||||
print("INTERNAL ERROR\n", file=s)
|
||||
traceback.print_exc(file=s)
|
||||
f = self.send_string(s.getvalue(), "text/plain")
|
||||
if f:
|
||||
self.copyfile(f, self.wfile)
|
||||
f.close()
|
||||
|
||||
def get_scalar_field(self, name):
|
||||
if name in self.fields:
|
||||
return self.fields[name][0]
|
||||
else:
|
||||
return None
|
||||
|
||||
def submit_bug(self, c):
|
||||
title = self.get_scalar_field("title")
|
||||
description = self.get_scalar_field("description")
|
||||
report = self.get_scalar_field("report")
|
||||
reporterIndex = self.get_scalar_field("reporter")
|
||||
files = []
|
||||
for fileID in self.fields.get("files", []):
|
||||
try:
|
||||
i = int(fileID)
|
||||
except:
|
||||
i = None
|
||||
if i is None or i < 0 or i >= len(c.files):
|
||||
return (False, "Invalid file ID")
|
||||
files.append(c.files[i])
|
||||
|
||||
if not title:
|
||||
return (False, "Missing title.")
|
||||
if not description:
|
||||
return (False, "Missing description.")
|
||||
try:
|
||||
reporterIndex = int(reporterIndex)
|
||||
except:
|
||||
return (False, "Invalid report method.")
|
||||
|
||||
# Get the reporter and parameters.
|
||||
reporter = self.server.reporters[reporterIndex]
|
||||
parameters = {}
|
||||
for o in reporter.getParameters():
|
||||
name = "%s_%s" % (reporter.getName(), o.getName())
|
||||
if name not in self.fields:
|
||||
return (
|
||||
False,
|
||||
'Missing field "%s" for %s report method.'
|
||||
% (name, reporter.getName()),
|
||||
)
|
||||
parameters[o.getName()] = self.get_scalar_field(name)
|
||||
|
||||
# Update config defaults.
|
||||
if report != "None":
|
||||
self.server.config.set("ScanView", "reporter", reporterIndex)
|
||||
for o in reporter.getParameters():
|
||||
if o.saveConfigValue():
|
||||
name = o.getName()
|
||||
self.server.config.set(reporter.getName(), name, parameters[name])
|
||||
|
||||
# Create the report.
|
||||
bug = Reporter.BugReport(title, description, files)
|
||||
|
||||
# Kick off a reporting thread.
|
||||
t = ReporterThread(bug, reporter, parameters, self.server)
|
||||
t.start()
|
||||
|
||||
# Wait for thread to die...
|
||||
while t.is_alive():
|
||||
time.sleep(0.25)
|
||||
submitStatus = t.status
|
||||
|
||||
return (t.success, t.status)
|
||||
|
||||
def send_report_submit(self):
|
||||
report = self.get_scalar_field("report")
|
||||
c = self.get_report_context(report)
|
||||
if c.reportSource is None:
|
||||
reportingFor = "Report Crashes > "
|
||||
fileBug = (
|
||||
"""\
|
||||
<a href="/report_crashes">File Bug</a> > """
|
||||
% locals()
|
||||
)
|
||||
else:
|
||||
reportingFor = '<a href="/%s">Report %s</a> > ' % (c.reportSource, report)
|
||||
fileBug = '<a href="/report/%s">File Bug</a> > ' % report
|
||||
title = self.get_scalar_field("title")
|
||||
description = self.get_scalar_field("description")
|
||||
|
||||
res, message = self.submit_bug(c)
|
||||
|
||||
if res:
|
||||
statusClass = "SubmitOk"
|
||||
statusName = "Succeeded"
|
||||
else:
|
||||
statusClass = "SubmitFail"
|
||||
statusName = "Failed"
|
||||
|
||||
result = (
|
||||
"""
|
||||
<head>
|
||||
<title>Bug Submission</title>
|
||||
<link rel="stylesheet" type="text/css" href="/scanview.css" />
|
||||
</head>
|
||||
<body>
|
||||
<h3>
|
||||
<a href="/">Summary</a> >
|
||||
%(reportingFor)s
|
||||
%(fileBug)s
|
||||
Submit</h3>
|
||||
<form name="form" action="">
|
||||
<table class="form">
|
||||
<tr><td>
|
||||
<table class="form_group">
|
||||
<tr>
|
||||
<td class="form_clabel">Title:</td>
|
||||
<td class="form_value">
|
||||
<input type="text" name="title" size="50" value="%(title)s" disabled>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="form_label">Description:</td>
|
||||
<td class="form_value">
|
||||
<textarea rows="10" cols="80" name="description" disabled>
|
||||
%(description)s
|
||||
</textarea>
|
||||
</td>
|
||||
</table>
|
||||
</td></tr>
|
||||
</table>
|
||||
</form>
|
||||
<h1 class="%(statusClass)s">Submission %(statusName)s</h1>
|
||||
%(message)s
|
||||
<p>
|
||||
<hr>
|
||||
<a href="/">Return to Summary</a>
|
||||
</body>
|
||||
</html>"""
|
||||
% locals()
|
||||
)
|
||||
return self.send_string(result)
|
||||
|
||||
def send_open_report(self, report):
|
||||
try:
|
||||
keys = self.load_report(report)
|
||||
except IOError:
|
||||
return self.send_error(400, "Invalid report.")
|
||||
|
||||
file = keys.get("FILE")
|
||||
if not file or not posixpath.exists(file):
|
||||
return self.send_error(400, 'File does not exist: "%s"' % file)
|
||||
|
||||
import startfile
|
||||
|
||||
if self.server.options.debug:
|
||||
print('%s: SERVER: opening "%s"' % (sys.argv[0], file), file=sys.stderr)
|
||||
|
||||
status = startfile.open(file)
|
||||
if status:
|
||||
res = 'Opened: "%s"' % file
|
||||
else:
|
||||
res = 'Open failed: "%s"' % file
|
||||
|
||||
return self.send_string(res, "text/plain")
|
||||
|
||||
def get_report_context(self, report):
|
||||
class Context(object):
|
||||
pass
|
||||
|
||||
if report is None or report == "None":
|
||||
data = self.load_crashes()
|
||||
# Don't allow empty reports.
|
||||
if not data:
|
||||
raise ValueError("No crashes detected!")
|
||||
c = Context()
|
||||
c.title = "clang static analyzer failures"
|
||||
|
||||
stderrSummary = ""
|
||||
for item in data:
|
||||
if "stderr" in item:
|
||||
path = posixpath.join(self.server.root, item["stderr"])
|
||||
if os.path.exists(path):
|
||||
lns = itertools.islice(open(path), 0, 10)
|
||||
stderrSummary += "%s\n--\n%s" % (
|
||||
item.get("src", "<unknown>"),
|
||||
"".join(lns),
|
||||
)
|
||||
|
||||
c.description = """\
|
||||
The clang static analyzer failed on these inputs:
|
||||
%s
|
||||
|
||||
STDERR Summary
|
||||
--------------
|
||||
%s
|
||||
""" % (
|
||||
"\n".join([item.get("src", "<unknown>") for item in data]),
|
||||
stderrSummary,
|
||||
)
|
||||
c.reportSource = None
|
||||
c.navMarkup = "Report Crashes > "
|
||||
c.files = []
|
||||
for item in data:
|
||||
c.files.append(item.get("src", ""))
|
||||
c.files.append(posixpath.join(self.server.root, item.get("file", "")))
|
||||
c.files.append(
|
||||
posixpath.join(self.server.root, item.get("clangfile", ""))
|
||||
)
|
||||
c.files.append(posixpath.join(self.server.root, item.get("stderr", "")))
|
||||
c.files.append(posixpath.join(self.server.root, item.get("info", "")))
|
||||
# Just in case something failed, ignore files which don't
|
||||
# exist.
|
||||
c.files = [f for f in c.files if os.path.exists(f) and os.path.isfile(f)]
|
||||
else:
|
||||
# Check that this is a valid report.
|
||||
path = posixpath.join(self.server.root, "report-%s.html" % report)
|
||||
if not posixpath.exists(path):
|
||||
raise ValueError("Invalid report ID")
|
||||
keys = self.load_report(report)
|
||||
c = Context()
|
||||
c.title = keys.get("DESC", "clang error (unrecognized)")
|
||||
c.description = """\
|
||||
Bug reported by the clang static analyzer.
|
||||
|
||||
Description: %s
|
||||
File: %s
|
||||
Line: %s
|
||||
""" % (
|
||||
c.title,
|
||||
keys.get("FILE", "<unknown>"),
|
||||
keys.get("LINE", "<unknown>"),
|
||||
)
|
||||
c.reportSource = "report-%s.html" % report
|
||||
c.navMarkup = """<a href="/%s">Report %s</a> > """ % (
|
||||
c.reportSource,
|
||||
report,
|
||||
)
|
||||
|
||||
c.files = [path]
|
||||
return c
|
||||
|
||||
def send_report(self, report, configOverrides=None):
|
||||
def getConfigOption(section, field):
|
||||
if (
|
||||
configOverrides is not None
|
||||
and section in configOverrides
|
||||
and field in configOverrides[section]
|
||||
):
|
||||
return configOverrides[section][field]
|
||||
return self.server.config.get(section, field)
|
||||
|
||||
# report is None is used for crashes
|
||||
try:
|
||||
c = self.get_report_context(report)
|
||||
except ValueError as e:
|
||||
return self.send_error(400, e.message)
|
||||
|
||||
title = c.title
|
||||
description = c.description
|
||||
reportingFor = c.navMarkup
|
||||
if c.reportSource is None:
|
||||
extraIFrame = ""
|
||||
else:
|
||||
extraIFrame = """\
|
||||
<iframe src="/%s" width="100%%" height="40%%"
|
||||
scrolling="auto" frameborder="1">
|
||||
<a href="/%s">View Bug Report</a>
|
||||
</iframe>""" % (
|
||||
c.reportSource,
|
||||
c.reportSource,
|
||||
)
|
||||
|
||||
reporterSelections = []
|
||||
reporterOptions = []
|
||||
|
||||
try:
|
||||
active = int(getConfigOption("ScanView", "reporter"))
|
||||
except:
|
||||
active = 0
|
||||
for i, r in enumerate(self.server.reporters):
|
||||
selected = i == active
|
||||
if selected:
|
||||
selectedStr = " selected"
|
||||
else:
|
||||
selectedStr = ""
|
||||
reporterSelections.append(
|
||||
'<option value="%d"%s>%s</option>' % (i, selectedStr, r.getName())
|
||||
)
|
||||
options = "\n".join(
|
||||
[o.getHTML(r, title, getConfigOption) for o in r.getParameters()]
|
||||
)
|
||||
display = ("none", "")[selected]
|
||||
reporterOptions.append(
|
||||
"""\
|
||||
<tr id="%sReporterOptions" style="display:%s">
|
||||
<td class="form_label">%s Options</td>
|
||||
<td class="form_value">
|
||||
<table class="form_inner_group">
|
||||
%s
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
"""
|
||||
% (r.getName(), display, r.getName(), options)
|
||||
)
|
||||
reporterSelections = "\n".join(reporterSelections)
|
||||
reporterOptionsDivs = "\n".join(reporterOptions)
|
||||
reportersArray = "[%s]" % (
|
||||
",".join([repr(r.getName()) for r in self.server.reporters])
|
||||
)
|
||||
|
||||
if c.files:
|
||||
fieldSize = min(5, len(c.files))
|
||||
attachFileOptions = "\n".join(
|
||||
[
|
||||
"""\
|
||||
<option value="%d" selected>%s</option>"""
|
||||
% (i, v)
|
||||
for i, v in enumerate(c.files)
|
||||
]
|
||||
)
|
||||
attachFileRow = """\
|
||||
<tr>
|
||||
<td class="form_label">Attach:</td>
|
||||
<td class="form_value">
|
||||
<select style="width:100%%" name="files" multiple size=%d>
|
||||
%s
|
||||
</select>
|
||||
</td>
|
||||
</tr>
|
||||
""" % (
|
||||
min(5, len(c.files)),
|
||||
attachFileOptions,
|
||||
)
|
||||
else:
|
||||
attachFileRow = ""
|
||||
|
||||
        result = (
            """<html>
<head>
  <title>File Bug</title>
  <link rel="stylesheet" type="text/css" href="/scanview.css" />
</head>
<script language="javascript" type="text/javascript">
var reporters = %(reportersArray)s;
function updateReporterOptions() {
  index = document.getElementById('reporter').selectedIndex;
  for (var i=0; i < reporters.length; ++i) {
    o = document.getElementById(reporters[i] + "ReporterOptions");
    if (i == index) {
      o.style.display = "";
    } else {
      o.style.display = "none";
    }
  }
}
</script>
<body onLoad="updateReporterOptions()">
<h3>
<a href="/">Summary</a> >
%(reportingFor)s
File Bug</h3>
<form name="form" action="/report_submit" method="post">
<input type="hidden" name="report" value="%(report)s">

<table class="form">
<tr><td>
<table class="form_group">
<tr>
  <td class="form_clabel">Title:</td>
  <td class="form_value">
    <input type="text" name="title" size="50" value="%(title)s">
  </td>
</tr>
<tr>
  <td class="form_label">Description:</td>
  <td class="form_value">
    <textarea rows="10" cols="80" name="description">
%(description)s
    </textarea>
  </td>
</tr>

%(attachFileRow)s

</table>
<br>
<table class="form_group">
<tr>
  <td class="form_clabel">Method:</td>
  <td class="form_value">
    <select id="reporter" name="reporter" onChange="updateReporterOptions()">
%(reporterSelections)s
    </select>
  </td>
</tr>
%(reporterOptionsDivs)s
</table>
<br>
</td></tr>
<tr><td class="form_submit">
  <input align="right" type="submit" name="Submit" value="Submit">
</td></tr>
</table>
</form>

%(extraIFrame)s

</body>
</html>"""
            % locals()
        )

        return self.send_string(result)

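    # send_head() below is the common dispatcher for GET/HEAD requests: it
    # rejects non-local clients when options.onlyServeLocal is set, handles the
    # special endpoints matched in the code that follows (/report/<id>,
    # /open/<id>, /quit, /report_submit, /report_crashes, /favicon.ico), and
    # otherwise serves files from the report directory, mapping directory
    # requests to index.html.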
    def send_head(self, fields=None):
        if self.server.options.onlyServeLocal and self.client_address[0] != "127.0.0.1":
            return self.send_error(401, "Unauthorized host.")

        if fields is None:
            fields = {}
        self.fields = fields

        o = urlparse(self.path)
        self.fields = parse_query(o.query, fields)
        path = posixpath.normpath(unquote(o.path))

        # Split the components and strip the root prefix.
        components = path.split("/")[1:]

        # Special case some top-level entries.
        if components:
            name = components[0]
            if len(components) == 2:
                if name == "report":
                    return self.send_report(components[1])
                elif name == "open":
                    return self.send_open_report(components[1])
            elif len(components) == 1:
                if name == "quit":
                    self.server.halt()
                    return self.send_string("Goodbye.", "text/plain")
                elif name == "report_submit":
                    return self.send_report_submit()
                elif name == "report_crashes":
                    overrides = {"ScanView": {}, "Radar": {}, "Email": {}}
                    for i, r in enumerate(self.server.reporters):
                        if r.getName() == "Radar":
                            overrides["ScanView"]["reporter"] = i
                            break
                    overrides["Radar"]["Component"] = "llvm - checker"
                    overrides["Radar"]["Component Version"] = "X"
                    return self.send_report(None, overrides)
                elif name == "favicon.ico":
                    return self.send_path(posixpath.join(kShare, "bugcatcher.ico"))

        # Match directory entries.
        if components[-1] == "":
            components[-1] = "index.html"

        relpath = "/".join(components)
        path = posixpath.join(self.server.root, relpath)

        if self.server.options.debug > 1:
            print(
                '%s: SERVER: sending path "%s"' % (sys.argv[0], path), file=sys.stderr
            )
        return self.send_path(path)

    def send_404(self):
        self.send_error(404, "File not found")
        return None

    def send_path(self, path):
        # If the requested path is outside the root directory, do not open it.
        rel = os.path.abspath(path)
        if not rel.startswith(os.path.abspath(self.server.root)):
            return self.send_404()

        ctype = self.guess_type(path)
        if ctype.startswith("text/"):
            # Patch the file (see send_patched_file) instead of serving it verbatim.
            return self.send_patched_file(path, ctype)
        else:
            mode = "rb"
        try:
            f = open(path, mode)
        except IOError:
            return self.send_404()
        return self.send_file(f, ctype)

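    # The helpers below write the response headers and return an open file-like
    # object; the base handler machinery that invoked send_head() is expected
    # to copy that stream to the client and close it.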
    def send_file(self, f, ctype):
        # Patch files to add links, but skip binary files.
        self.send_response(200)
        self.send_header("Content-type", ctype)
        fs = os.fstat(f.fileno())
        self.send_header("Content-Length", str(fs[6]))
        self.send_header("Last-Modified", self.date_time_string(fs.st_mtime))
        self.end_headers()
        return f

    def send_string(self, s, ctype="text/html", headers=True, mtime=None):
        encoded_s = s.encode("utf-8")
        if headers:
            self.send_response(200)
            self.send_header("Content-type", ctype)
            self.send_header("Content-Length", str(len(encoded_s)))
            if mtime is None:
                mtime = self.dynamic_mtime
            self.send_header("Last-Modified", self.date_time_string(mtime))
            self.end_headers()
        return BytesIO(encoded_s)

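    # send_patched_file() serves a text file with the report-specific
    # substitutions applied: each (regex, replacement) pair in
    # kReportReplacements (defined elsewhere in this file) is expanded with the
    # small "variables" dict built below before being substituted into the
    # file's contents.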
    def send_patched_file(self, path, ctype):
        # Allow a very limited set of variables. This is pretty gross.
        variables = {}
        variables["report"] = ""
        m = kReportFileRE.match(path)
        if m:
            variables["report"] = m.group(2)

        try:
            f = open(path, "rb")
        except IOError:
            return self.send_404()
        fs = os.fstat(f.fileno())
        data = f.read().decode("utf-8")
        for a, b in kReportReplacements:
            data = a.sub(b % variables, data)
        return self.send_string(data, ctype, mtime=fs.st_mtime)


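# create_server() wires the available Reporter plugins into a ScanViewServer
# listening on the given address and serving the given report root.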
def create_server(address, options, root):
    import Reporter

    reporters = Reporter.getReporters()

    return ScanViewServer(address, ScanViewRequestHandler, root, reporters, options)
Binary file not shown (new image, 318 B).
@ -0,0 +1,216 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Utility for opening a file using the default application in a cross-platform
manner. Modified from http://code.activestate.com/recipes/511443/.
"""
__version__ = "1.1x"
__all__ = ["open"]

import os
import sys
import webbrowser
import subprocess

_controllers = {}
_open = None


class BaseController(object):
    """Base class for open program controllers."""

    def __init__(self, name):
        self.name = name

    def open(self, filename):
        raise NotImplementedError


class Controller(BaseController):
    """Controller for a generic open program."""

    def __init__(self, *args):
        super(Controller, self).__init__(os.path.basename(args[0]))
        self.args = list(args)

    def _invoke(self, cmdline):
        if sys.platform[:3] == "win":
            closefds = False
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        else:
            closefds = True
            startupinfo = None

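        # Note: the module-level open() defined at the bottom of this file
        # shadows the builtin, which is why the Python 2 builtin file() is used
        # below to get a handle on os.devnull.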
        if (
            os.environ.get("DISPLAY")
            or sys.platform[:3] == "win"
            or sys.platform == "darwin"
        ):
            inout = file(os.devnull, "r+")
        else:
            # for TTY programs, we need stdin/out
            inout = None

        # if possible, put the child process in a separate process group, so
        # keyboard interrupts don't affect the child process as well as
        # Python
        setsid = getattr(os, "setsid", None)
        if not setsid:
            setsid = getattr(os, "setpgrp", None)

        pipe = subprocess.Popen(
            cmdline,
            stdin=inout,
            stdout=inout,
            stderr=inout,
            close_fds=closefds,
            preexec_fn=setsid,
            startupinfo=startupinfo,
        )

        # It is assumed that these kinds of tools (gnome-open, kfmclient,
        # exo-open, xdg-open and open for OSX) exit immediately after launching
        # the specific application
        returncode = pipe.wait()
        if hasattr(self, "fixreturncode"):
            returncode = self.fixreturncode(returncode)
        return not returncode

    def open(self, filename):
        if isinstance(filename, basestring):
            cmdline = self.args + [filename]
        else:
            # assume it is a sequence
            cmdline = self.args + filename
        try:
            return self._invoke(cmdline)
        except OSError:
            return False


# Platform support for Windows
if sys.platform[:3] == "win":

    class Start(BaseController):
        """Controller for the win32 start program through os.startfile."""

        def open(self, filename):
            try:
                os.startfile(filename)
            except WindowsError:
                # [Error 22] No application is associated with the specified
                # file for this operation: '<URL>'
                return False
            else:
                return True

    _controllers["windows-default"] = Start("start")
    _open = _controllers["windows-default"].open


# Platform support for MacOS
elif sys.platform == "darwin":
    _controllers["open"] = Controller("open")
    _open = _controllers["open"].open


# Platform support for Unix
else:

    try:
        from commands import getoutput
    except ImportError:
        from subprocess import getoutput

    # @WARNING: this uses the private API of the webbrowser module
    from webbrowser import _iscommand

    class KfmClient(Controller):
        """Controller for the KDE kfmclient program."""

        def __init__(self, kfmclient="kfmclient"):
            super(KfmClient, self).__init__(kfmclient, "exec")
            self.kde_version = self.detect_kde_version()

        def detect_kde_version(self):
            kde_version = None
            try:
                info = getoutput("kde-config --version")

                for line in info.splitlines():
                    if line.startswith("KDE"):
                        kde_version = line.split(":")[-1].strip()
                        break
            except (OSError, RuntimeError):
                pass

            return kde_version

        def fixreturncode(self, returncode):
            if returncode is not None and self.kde_version > "3.5.4":
                return returncode
            else:
                return os.EX_OK

    def detect_desktop_environment():
        """Checks for known desktop environments

        Return the desktop environment's name, lowercase (kde, gnome, xfce)
        or "generic"

        """

        desktop_environment = "generic"

        if os.environ.get("KDE_FULL_SESSION") == "true":
            desktop_environment = "kde"
        elif os.environ.get("GNOME_DESKTOP_SESSION_ID"):
            desktop_environment = "gnome"
        else:
            try:
                info = getoutput("xprop -root _DT_SAVE_MODE")
                if ' = "xfce4"' in info:
                    desktop_environment = "xfce"
            except (OSError, RuntimeError):
                pass

        return desktop_environment

    def register_X_controllers():
        if _iscommand("kfmclient"):
            _controllers["kde-open"] = KfmClient()

        for command in ("gnome-open", "exo-open", "xdg-open"):
            if _iscommand(command):
                _controllers[command] = Controller(command)

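    # get() picks the opener matching the detected desktop environment, falling
    # back to xdg-open when available and finally to webbrowser.open.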
    def get():
        controllers_map = {
            "gnome": "gnome-open",
            "kde": "kde-open",
            "xfce": "exo-open",
        }

        desktop_environment = detect_desktop_environment()

        try:
            controller_name = controllers_map[desktop_environment]
            return _controllers[controller_name].open

        except KeyError:
            if "xdg-open" in _controllers:
                return _controllers["xdg-open"].open
            else:
                return webbrowser.open

    if os.environ.get("DISPLAY"):
        register_X_controllers()
    _open = get()


def open(filename):
    """Open a file or a URL in the registered default application."""

    return _open(filename)