Berk Onat / python-common · Commits

Commit 0c5adafd, authored Nov 11, 2015 by Mohamed, Fawzi Roberto (fawzi)

initial commit

Changes: 13 files
python/nomadcore/__init__.py 0 → 100644
(empty file)
python/nomadcore/basic_meta_info.py 0 → 100644
import os, logging
from nomadcore.local_meta_info import InfoKindEl, InfoKindEnv, loadJsonFile

logger = logging.getLogger(__name__)

# the nomad_meta_info checkout is expected next to the python directory
baseDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
metaInfoDir = os.path.normpath(os.path.join(baseDir, "../nomad_meta_info"))

# eagerly load every *.nomadmetainfo.json below metaInfoDir at import time
allMetaInfo = {}
for dirpath, dirnames, filenames in os.walk(metaInfoDir):
    for filename in filenames:
        if not filename.endswith(".nomadmetainfo.json"):
            continue
        filepath = os.path.join(dirpath, filename)
        try:
            newEnv, warnings = loadJsonFile(filepath, dependencyLoader=None,
                    extraArgsHandling=InfoKindEl.ADD_EXTRA_ARGS, uri=None)
            if warnings.get("hasWarnings", True):
                logger.warn("loading %s had the following warnings: %s" % (filepath, warnings))
            allMetaInfo[filepath] = newEnv
        except:
            logger.exception("nomadcore.basic_meta_info could not load %s", filepath)

baseMetaInfo = allMetaInfo[os.path.join(metaInfoDir, "nomad_base.nomadmetainfo.json")]
metaMetaInfo = allMetaInfo[os.path.join(metaInfoDir, "meta_types.nomadmetainfo.json")]
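Importing the module thus eagerly loads all meta-info files; a minimal usage sketch (assuming the nomad_meta_info checkout exists at the expected relative location):

# sketch: list which meta-info files were loaded at import time
from nomadcore.basic_meta_info import allMetaInfo
for path in sorted(allMetaInfo.keys()):
    print(path)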
python/nomadcore/compact_sha.py 0 → 100644
import hashlib
import base64

class CompactHash(object):
    """Wraps a hashlib hash object and adds compact (padding-free) digest encodings."""
    def __init__(self, proto):
        self._proto = proto

    def b64digest(self):
        # URL-safe base64 ('-' and '_' as altchars), trailing '==' padding dropped
        return base64.b64encode(self.digest(), "-_")[:-2]

    def b32digest(self):
        # base32, truncated at the first '=' padding character
        res = base64.b32encode(self.digest())
        return res[:res.index('=')]

    def update(self, data):
        # accept text as well as bytes (unicode is the Python 2 text type)
        if type(data) == unicode:
            data = data.encode("utf-8")
        return self._proto.update(data)

    def __getattr__(self, name):
        # delegate everything else (digest, hexdigest, ...) to the wrapped hash
        return getattr(self._proto, name)

def sha224(*args, **kwargs):
    return CompactHash(hashlib.sha224(*args, **kwargs))

def sha512(*args, **kwargs):
    return CompactHash(hashlib.sha512(*args, **kwargs))

def md5(*args, **kwargs):
    return CompactHash(hashlib.md5(*args, **kwargs))
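A quick sketch of the wrapper in use (Python 2, which the unicode check above targets):

h = sha224()
h.update(u"hello")        # text is UTF-8 encoded before hashing
print(h.b64digest())      # URL-safe base64 digest, padding stripped
print(h.hexdigest())      # plain hashlib methods remain reachable via __getattr__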
python/nomadcore/json_support.py 0 → 100644
"""Various functions to simplify and standardize dumping objects to json"""

import json
import sys  # used by jsonDump below (missing in the original)

from nomadcore import compact_sha  # ShaStreamer below needs it (the original referenced an undefined CompactSha)

class ExtraIndenter(object):
    """Helper class to add extra indent at the beginning of every line"""
    def __init__(self, fStream, extraIndent):
        self.fStream = fStream
        self.indent = " " * extraIndent if extraIndent else ""

    def write(self, val):
        i = 0
        while True:
            j = val.find("\n", i)
            if j == -1:
                self.fStream.write(val[i:])  # fixed: the original wrote to a bare fStream name (NameError)
                return
            j += 1
            self.fStream.write(val[i:j])
            self.fStream.write(self.indent)
            i = j
def jsonCompactF(obj, fOut, check_circular=False):
    """Dumps the object obj with a compact json representation using the utf_8 encoding
    to the file stream fOut"""
    json.dump(obj, fOut, sort_keys=True, indent=None, separators=(',', ':'),
              ensure_ascii=False, check_circular=check_circular)

def jsonIndentF(obj, fOut, check_circular=False, extraIndent=None):
    """Dumps the object obj with an indented json representation using the utf_8 encoding
    to the file stream fOut"""
    fStream = fOut
    if extraIndent:
        fStream = ExtraIndenter(fOut, extraIndent=extraIndent)
    json.dump(obj, fStream, sort_keys=True, indent=2, separators=(',', ': '),
              ensure_ascii=False, check_circular=check_circular)
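For instance, the compact form sorts keys and drops all whitespace (a sketch using StringIO as the target stream):

from StringIO import StringIO  # io.StringIO on Python 3
buf = StringIO()
jsonCompactF({"b": 1, "a": [1, 2]}, buf)
print(buf.getvalue())  # {"a":[1,2],"b":1}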
class DumpToStream(object):
    """Wraps a dump function so that it can be used as a write stream"""
    def __init__(self, dumpF, extraIndent=None):
        self.baseDumpF = dumpF
        self.extraIndent = extraIndent
        self.indent = " " * extraIndent if extraIndent else ""
        self.dumpF = self.dumpIndented if extraIndent else dumpF

    def dumpIndented(self, val):
        if type(val) == unicode:
            val = val.encode("utf_8")
        i = 0
        while True:
            j = val.find("\n", i)
            if j == -1:
                self.baseDumpF(val[i:])
                return
            j += 1
            self.baseDumpF(val[i:j])
            self.baseDumpF(self.indent)
            i = j

    def write(self, val):
        self.dumpF(val)
def jsonCompactD(obj, dumpF, check_circular=False):
    """Dumps the object obj with a compact json representation using the utf_8 encoding
    to the function dumpF"""
    json.dump(obj, DumpToStream(dumpF), sort_keys=True, indent=None, separators=(', ', ': '),
              ensure_ascii=False, check_circular=check_circular)

def jsonIndentD(obj, dumpF, check_circular=False, extraIndent=None):
    """Dumps the object obj with an indented json representation using the utf_8 encoding
    to the function dumpF"""
    json.dump(obj, DumpToStream(dumpF, extraIndent=extraIndent), sort_keys=True, indent=2,
              separators=(',', ': '), ensure_ascii=False, check_circular=check_circular,
              encoding="utf_8")

def jsonCompactS(obj, check_circular=False):
    """returns a compact json representation of the object obj as a string"""
    return json.dumps(obj, sort_keys=True, indent=None, separators=(', ', ': '),
                      ensure_ascii=False, check_circular=check_circular, encoding="utf_8")

def jsonIndentS(obj, check_circular=False, extraIndent=None):
    """returns an indented json representation of the object obj as a string"""
    res = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '),
                     ensure_ascii=False, check_circular=check_circular, encoding="utf_8")
    if extraIndent:
        indent = " " * extraIndent
        res = res.replace("\n", "\n" + indent)
    return res
def jsonDump(obj, path):
    """Dumps the object obj to a newly created utf_8 file at path"""
    kwds = dict()
    if sys.version_info.major > 2:
        kwds["encoding"] = "utf_8"
    with open(path, "w", **kwds) as f:
        jsonIndentF(obj, f)
class ShaStreamer(object):
    """a file like object that calculates one or more shas"""
    def __init__(self, shas=None):
        self.shas = shas
        if shas is None:
            self.shas = (compact_sha.sha224(),)  # fixed: the original referenced an undefined CompactSha

    def write(self, val):
        for sha in self.shas:
            sha.update(val)

    def b64digests(self):
        # fixed: the original called b32digest here, contradicting the method name
        return [sha.b64digest() for sha in self.shas]
def addShasOfJson(obj, shas=None):
    """adds the jsonDump of obj to the shas"""
    streamer = ShaStreamer(shas)
    jsonCompactF(obj, streamer)
    return streamer

def normalizedJsonGid(obj, shas=None):
    """returns the gid of the standard formatted jsonDump of obj"""
    # fixed: the original dropped the obj argument in the addShasOfJson call
    return map(lambda x: 'j' + x, addShasOfJson(obj, shas).b64digests())
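The gid of an object is thus a prefixed digest of its canonical compact JSON form; a sketch:

gids = normalizedJsonGid({"name": "x", "values": [1, 2, 3]})
print(list(gids))  # one 'j'-prefixed digest per sha in the streamer, e.g. ['j...']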
python/nomadcore/local_meta_info.py 0 → 100644
(diff collapsed)
python/nomadcore/model_archive.py 0 → 100644
(diff collapsed)
python/nomadcore/model_base.py 0 → 100644
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine, exc
from sqlalchemy.orm import sessionmaker
import logging, sys

logger = logging.getLogger(__name__)
handler = logging.StreamHandler(stream=sys.stdout)
logger.setLevel(logging.INFO)
logger.addHandler(handler)

Base = declarative_base()
useNested = False
useJson = False
if useJson:
    from sqlalchemy.dialects.postgresql import JSON, JSONB

def compareSorted(oldValues, newValues):
    """Compares two sorted lists, returning (toRemove, toAdd) to go from old to new."""
    toAdd, toRemove = [], []
    iNew, iOld = 0, 0
    while iNew < len(newValues) and iOld < len(oldValues):
        if newValues[iNew] < oldValues[iOld]:
            toAdd.append(newValues[iNew])
            iNew += 1
        elif newValues[iNew] > oldValues[iOld]:
            toRemove.append(oldValues[iOld])
            iOld += 1
        else:
            iNew += 1
            iOld += 1
    toAdd += newValues[iNew:]
    toRemove += oldValues[iOld:]
    return (toRemove, toAdd)
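For example:

toRemove, toAdd = compareSorted([1, 2, 4], [2, 3, 4, 5])
# toRemove == [1], toAdd == [3, 5]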
def createEngine(engineStr='sqlite:///:memory:', echo=True):
    return create_engine(engineStr, echo=echo)

def createDB(engine):
    Base.metadata.create_all(engine)

def createSession(engine):
    return sessionmaker(bind=engine)()

def get_or_create(cls, session, defaults=None, **kwds):
    """Returns (instance, created): an existing row matching kwds, or a newly created one."""
    result = session.query(cls).filter_by(**kwds).first()
    if result:
        return result, False  # fixed: the original returned True here too, making the flag meaningless
    newVals = defaults
    if defaults is None:
        newVals = {}
    newVals.update(kwds)
    result = cls(**newVals)
    if useNested:
        nestedSession = session.begin_nested()
        nestedSession.add(result)
        try:
            nestedSession.commit()
        except exc.IntegrityError:
            # someone else inserted the row concurrently; fetch it
            nestedSession.rollback()
            result = session.query(cls).filter_by(**kwds).one()
    else:
        session.add(result)
        session.flush()
    return result, True
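A usage sketch with the in-memory default engine (MyModel stands for any hypothetical declarative model built on this module's Base):

engine = createEngine(echo=False)
createDB(engine)
session = createSession(engine)
obj, created = get_or_create(MyModel, session, name="sample")  # MyModel: hypothetical model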
python/nomadcore/model_meta_info.py 0 → 100644
(diff collapsed)
python/nomadcore/utils.py 0 → 100644
import readline  # optional, will allow Up/Down/History in the console
import code

def goInteractive(locals):
    """Debugging function that when called stops execution and drops in an interactive loop.
    Exiting the interpreter will continue execution.
    call as follows:
    goInteractive(locals())
    """
    vars = globals().copy()
    vars.update(locals)
    shell = code.InteractiveConsole(vars)
    shell.interact()
python/nomadscripts/__init__.py 0 → 100644
(empty file)
python/nomadscripts/calculate_meta_info_overrides.py 0 → 100644
#!/usr/bin/env python
import sys, os, os.path, datetime, logging, json

# make the nomadcore package importable when the script runs from its checkout
basePath = os.path.realpath(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if not basePath in sys.path:
    sys.path.append(basePath)

from nomadcore.local_meta_info import InfoKindEnv, InfoKindEl, loadJsonFile, loadJsonStream
from nomadcore.json_support import jsonIndentF, jsonCompactF
import git
class GitTreeDependencySolver:
    """Resolves relativePath dependencies of meta info files against a git tree."""
    def __init__(self, tree, basePath, context):
        self.tree = tree
        self.basePath = basePath
        self.context = context
        self.deps = {}

    def __call__(self, infoKindEnv, source, dep):
        if not dep.has_key("relativePath"):
            raise Exception('Invalid dependency for relativeDependencySolver: there must be a relativePath')
        dPath = os.path.normpath(os.path.join(self.basePath, dep.get('relativePath')))
        if dPath in self.deps:
            return self.deps[dPath]
        depInfo = None
        depIKEnv = InfoKindEnv(name=os.path.basename(dPath), path=dPath,
                               dependencyLoader=infoKindEnv.dependencyLoader)
        self.deps[dPath] = depIKEnv
        try:
            f = self.tree[dPath].data_stream
        except:
            print self.tree.hexsha, ", t.path", self.tree.path, ", bpath:", self.basePath, ", dPath:", dPath
            raise Exception("Failed to resolve dependency {0!r} in {context}, due to exception {1}".format(
                dep, sys.exc_value, context=self.context))
        depInfo = json.load(f)
        if depInfo:
            depIKEnv.fromJsonList(depInfo, source={'path': dPath}, dependencyLoad=True)
        return depIKEnv
def loadPath(path, loadOverrides=False):
    """loads all nomadmetainfo.json files within the given path.
    If loadOverrides is true then also nomadmetainfooverrides.json files are loaded"""
    allEnvs = {}
    allOverrides = {}
    hasWarnings = False
    if os.path.isdir(path):
        for dirpath, dirnames, filenames in os.walk(path):
            for filename in filenames:
                if filename.endswith(".nomadmetainfo.json"):
                    filepath = os.path.join(dirpath, filename)
                    env, warnings = loadJsonFile(filepath, extraArgsHandling=InfoKindEl.ADD_EXTRA_ARGS)
                    if warnings.get("duplicates", False) or warnings.get("overrides", False):
                        hasWarnings = True
                        logging.warn("{0!r}: warnings in loadJsonFile:{1}\n".format(filepath, warnings))
                    allEnvs[os.path.relpath(filepath, path)] = env
                elif loadOverrides and filename.endswith(".nomadmetainfooverrides.json"):
                    filepath = os.path.join(dirpath, filename)  # fixed: the original opened the bare filename
                    try:
                        with open(filepath) as f:
                            overrides = json.load(f)
                    except:
                        hasWarnings = True
                        logging.exception("Error loading %r", filepath)
                    else:
                        allOverrides[os.path.relpath(filepath, path)] = overrides
    return {"hasWarnings": hasWarnings, "envs": allEnvs, "overrides": allOverrides}
def loadRef(rev, path, loadOverrides=False):
    """loads all nomadmetainfo.json files within path from repo reference"""
    allEnvs = {}
    allOverrides = {}
    hasWarnings = False
    if path == ".":
        path = ""
    if path:
        tree = rev.tree[path]
    else:
        tree = rev.tree

    def loadBlobHasWarnings(obj):
        hasW = False
        if obj.type != 'blob':
            return False
        rPath = os.path.relpath(obj.path, path)
        if obj.name.endswith(".nomadmetainfo.json"):
            env, warnings = loadJsonStream(obj.data_stream,
                    GitTreeDependencySolver(rev.tree, os.path.dirname(obj.path), obj.path),
                    filePath=obj.path)
            allEnvs[rPath] = env
            dup = warnings.get("duplicates")
            hid = warnings.get("hidden")
            if dup or hid:
                hasW = True
                logging.warn("loading {path} of revision {rev}: {warnings}\n".format(
                    path=obj.path, rev=rev.name_rev, warnings=warnings))
        elif obj.name.endswith(".nomadmetainfooverrides.json"):
            try:
                overrides = json.load(obj.data_stream)
            except:
                hasW = True
                logging.exception("Error loading %r in revision %s", obj.path, rev.hexsha[:10])
            else:
                allOverrides[rPath] = overrides
        return hasW

    if tree.type == 'blob':
        if loadBlobHasWarnings(tree):
            hasWarnings = True
    else:
        # visit only the blobs with the extensions we care about
        for obj in tree.traverse(lambda i, d: i.type == 'blob' and
                (i.path.endswith(".nomadmetainfo.json") or
                 (loadOverrides and i.path.endswith(".nomadmetainfooverrides.json")))):
            if loadBlobHasWarnings(obj):
                hasWarnings = True
    return {"hasWarnings": hasWarnings, "envs": allEnvs, "overrides": allOverrides}
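A sketch of driving loadRef with GitPython (repoPath is a placeholder for the checkout holding the meta info):

repo = git.Repo(repoPath, search_parent_directories=True)  # repoPath: placeholder
res = loadRef(repo.commit("HEAD"), "nomad_meta_info")
print(res["hasWarnings"], len(res["envs"]))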
def insertDicList(d, key, value):
    # parameter renamed from dict to avoid shadowing the builtin
    if key in d:
        d[key].append(value)
    else:
        d[key] = [value]
def prepareValues(values, name):
    "prepares the values computing gids"
    allNames = {}
    hasWarnings = False
    for path, env in values.items():
        try:
            env.calcGids()
        except:
            hasWarnings = True
            logging.warn("Error calculating gids of {path!r} {name}: {exc}\n".format(
                path=path, name=name, exc=sys.exc_value))
        noDepNames = env.noDepNames()
        # loop variable renamed from name, which shadowed the function argument
        for kindName, gid in env.gids.items():
            if kindName in noDepNames:
                insertDicList(allNames, kindName, (path, gid))
    return {"allNames": allNames, "envs": values, "hasWarnings": hasWarnings}
def cmpOverrides(o1, o2):
    c = cmp(o1["name"], o2["name"])
    if c != 0:
        return c
    return cmp(o1, o2)
def buildOverrides(oldValues, newValues):
    """Computes gid overrides, additions and removals between two sets of meta info envs."""
    oldV = prepareValues(oldValues, "(old)")
    newV = prepareValues(newValues, "(new)")
    oldNames = oldV["allNames"]
    newNames = newV["allNames"]
    overrides = []
    complexOverrides = []
    additions = []
    removals = []
    hasWarnings = oldV["hasWarnings"] or newV["hasWarnings"]
    nMatched = 0
    for name, oldUses in oldNames.items():
        if not name in newNames:
            removals.append({"name": name, "oldGids": map(lambda x: x[1], oldUses),
                             "newGids": []})
            continue
        newUses = newNames[name]
        newNoMatch = list(newUses)
        oldNoMatch = []
        # first pass: match identical gids
        for oldPath, oldGid in oldUses:
            found = -1
            for i in range(len(newNoMatch)):
                newPath, newGid = newNoMatch[i]
                if newGid == oldGid:
                    del newNoMatch[i]
                    found = i
                    break
            if found != -1:
                nMatched += 1
            else:
                oldNoMatch.append((oldPath, oldGid))
        if not oldNoMatch and not newNoMatch:
            continue
        if len(oldNoMatch) == 1 and len(newNoMatch) == 1:
            overrides.append({"name": name, "oldGid": oldNoMatch[0][1],
                              "newGid": newNoMatch[0][1]})
            continue
        # try path matching
        iOld = 0
        while iOld < len(oldNoMatch):
            oldPath, oldGid = oldNoMatch[iOld]
            found = -1
            for iNew in range(len(newNoMatch)):
                newPath, newGid = newNoMatch[iNew]
                if newPath == oldPath:
                    overrides.append({"name": name, "oldGid": oldGid, "newGid": newGid})
                    del newNoMatch[iNew]  # a stray "del old" in the original was dropped here
                    found = iNew
                    break
            if found != -1:
                del oldNoMatch[iOld]
            else:
                iOld += 1
        if len(oldNoMatch) == 1 and len(newNoMatch) == 1:
            overrides.append({"name": name, "oldGid": oldNoMatch[0][1],
                              "newGid": newNoMatch[0][1]})
            continue  # fixed: without it this pair was also reported as a complexOverride
        if not oldNoMatch and not newNoMatch:
            continue
        elif oldNoMatch and not newNoMatch:
            removals.append({"name": name, "oldGids": map(lambda x: x[1], oldNoMatch),
                             "newGids": []})
        elif not oldNoMatch and newNoMatch:
            additions.append({"name": name, "oldGids": [],
                              "newGids": map(lambda x: x[1], newNoMatch)})
        else:
            # oldNoMatch and newNoMatch
            complexOverrides.append({"name": name, "oldGids": oldNoMatch,
                                     "newGids": newNoMatch})  # fixed: the original stored only newNoMatch[0][1]
    for name in newNames.keys():
        if not name in oldNames:
            additions.append({"name": name, "oldGids": [],
                              "newGids": map(lambda x: x[1], newNames[name])})
    overrides.sort(cmpOverrides)
    complexOverrides.sort(cmpOverrides)
    additions.sort(cmpOverrides)
    removals.sort(cmpOverrides)
    return {"oldNames": oldNames, "newNames": newNames, "overrides": overrides,
            "complexOverrides": complexOverrides, "additions": additions,
            "removals": removals, "hasWarnings": hasWarnings, "nMatched": nMatched}
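In the simplest case both versions live on disk; a sketch of the end-to-end flow (paths are placeholders):

oldV = loadPath("/path/to/old/nomad_meta_info")["envs"]
newV = loadPath("/path/to/new/nomad_meta_info")["envs"]
res = buildOverrides(oldV, newV)
print(res["nMatched"], len(res["overrides"]), len(res["additions"]), len(res["removals"]))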
if __name__ == "__main__":
    overridesDir = os.path.normpath(os.path.join(basePath, "../nomad_meta_info_overrides"))
    defaultPath = os.path.normpath(os.path.join(basePath, "../nomad_meta_info"))
    usage = """usage: {command} [--check-only] [--old-ref <ref1=HEAD>] [--old-path <path=None>] [--help]
    [--new-ref <ref2=None>] [--new-path <path=basePath>] [--repo-path <repoPath=None>]
    [--overrides-dir <overridesDir>] [--no-clobber] [--verbose] [<basePath>]

Generates the overrides for the kind infos that have the same name but a different sha in the old
and the new version.
The old version can be given as a git revision (defaults to HEAD) with --old-ref, or as a path with
--old-path; likewise the new version can be given as a revision with --new-ref or as a path with
--new-path.
basePath (which defaults to {defaultPath}) gives the path for which the revisions should be compared
(it might be a subdirectory or file of the repo); if no --new-ref is given, the version checked out
there is directly the new version.
The repository path is by default found by looking upward from the path for a .git repository, but
it can also be given explicitly with --repo-path.
overridesDir defaults to {overridesDir}.
By default output goes to stdout, but an explicit --out-file can also be given.
""".format(command=os.path.basename(sys.argv[0]), overridesDir=overridesDir, defaultPath=defaultPath)
    oldRef = None
    oldPath = None
    newRef = None
    newPath = None
    repoPath = None
    path = None
    checkOnly = False
    noClobber = False
    verbose = False

    iArg = 1
    while iArg < len(sys.argv):
        arg = sys.argv[iArg]
        if arg == "--help":
            sys.stderr.write(usage)
            sys.exit(0)
        elif arg == "--check-only":
            checkOnly = True
        elif arg == "--no-clobber":
            noClobber = True
        elif arg == "--verbose":
            verbose = True
        elif arg == "--old-ref":
            iArg += 1
            if iArg < len(sys.argv):
                oldRef = sys.argv[iArg]
            else:
                sys.stderr.write("Error: missing reference after --old-ref\n\n")
                sys.stderr.write(usage)
                sys.exit(1)
        elif arg == "--old-path":
            iArg += 1
            if iArg < len(sys.argv):
                oldPath = sys.argv[iArg]
            else:
                sys.stderr.write("Error: missing path after --old-path\n\n")
                sys.stderr.write(usage)
                sys.exit(2)
        elif arg == "--new-ref":
            iArg += 1
            if iArg < len(sys.argv):
                newRef = sys.argv[iArg]
            else:
                sys.stderr.write("Error: missing reference after --new-ref\n\n")
                sys.stderr.write(usage)
                sys.exit(3)
        elif arg == "--new-path":
            iArg += 1
            if iArg < len(sys.argv):
                newPath = sys.argv[iArg]
            else:
                sys.stderr.write("Error: missing path after --new-path\n\n")
                sys.stderr.write(usage)
                sys.exit(4)
        elif arg == "--repo-path":
            iArg += 1
            if iArg < len(sys.argv):
                repoPath = sys.argv[iArg]
            else:
                sys.stderr.write("Error: missing path after --repo-path\n\n")
                sys.stderr.write(usage)
                sys.exit(5)
        elif arg == "--overrides-dir":
            iArg += 1
            if iArg < len(sys.argv):
                overridesDir = sys.argv[iArg]
            else:
                sys.stderr.write("Error: missing path after --overrides-dir\n\n")  # fixed: message said "reference"
                sys.stderr.write(usage)
                sys.exit(7)
        elif path is None:
            path = arg
        else:
            sys.stderr.write("Error: unexpected extra argument {0!r}\n\n".format(arg))
            sys.stderr.write(usage)
            sys.exit(8)
        iArg += 1
    if path is None:
        path = defaultPath
    requiresRepo = True
    if oldPath is None and oldRef is None:
        oldRef = "HEAD"
    if not newRef and not newPath:
        newPath = path