Provide correct link to the Hydra job, or to the build if one exists (#81)

Also add a --force option to the import-channel script, which always recreates the index. Fixes #72.
commit 9413e66d90
parent 7a9d4cd9d4
@@ -29,8 +29,23 @@ logger = logging.getLogger("import-channel")
 click_log.basic_config(logger)
 
+S3_BUCKET = "nix-releases"
+
 CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
-INDEX_SCHEMA_VERSION = 4
+INDEX_SCHEMA_VERSION = 5
+CHANNELS = {
+    "unstable": {
+        "packages": "nixpkgs/nixpkgs-20.09pre",
+        "options": "nixos/unstable/nixos-20.09pre",
+    },
+    "19.09": {
+        "packages": "nixpkgs/nixpkgs-19.09pre",
+        "options": "nixos/19.09/nixos-19.09.",
+    },
+    "20.03": {
+        "packages": "nixpkgs/nixpkgs-19.09pre",
+        "options": "nixos/20.03/nixos-20.03.",
+    },
+}
 ANALYSIS = {
     "analyzer": {
         "nixAttrName": {
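The new CHANNELS table replaces the old approach of deriving S3 paths from the channel string: each channel name now maps directly to the two prefixes in the nix-releases bucket that hold its packages and options evaluations. A minimal sketch of the lookup, mirroring what main() does further down (values taken from the table above):

# Sketch only: resolve a channel name to its two S3 prefixes.
channel = "unstable"
packages_prefix = CHANNELS[channel]["packages"]  # "nixpkgs/nixpkgs-20.09pre"
options_prefix = CHANNELS[channel]["options"]    # "nixos/unstable/nixos-20.09pre"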
@@ -109,19 +124,25 @@ MAPPING = {
         "properties": {
             "type": {"type": "keyword"},
             # Package fields
-            "package_hydra_build_id": {"type": "keyword"},
-            "package_hydra_build_status": {"type": "keyword"},
-            "package_hydra_project": {"type": "keyword"},
-            "package_hydra_job": {"type": "keyword"},
-            "package_hydra_jobset": {"type": "keyword"},
-            "package_hydra_path": {
+            "package_hydra_build": {
                 "type": "nested",
                 "properties": {
-                    "output": {"type": "keyword"},
-                    "path": {"type": "keyword"}
-                }
+                    "build_id": {"type": "keyword"},
+                    "build_status": {"type": "keyword"},
+                    "platform": {"type": "keyword"},
+                    "project": {"type": "keyword"},
+                    "jobset": {"type": "keyword"},
+                    "job": {"type": "keyword"},
+                    "path": {
+                        "type": "nested",
+                        "properties": {
+                            "output": {"type": "keyword"},
+                            "path": {"type": "keyword"}
+                        },
+                    },
+                    "drv_path": {"type": "keyword"},
+                },
             },
-            "package_hydra_drvpath": {"type": "keyword"},
             "package_attr_name": {
                 "type": "text",
                 "analyzer": "nixAttrName",
@@ -172,54 +193,38 @@ MAPPING = {
 }
 
 
-def get_last_evaluation(channel):
-    logger.debug(f"Retriving last evaluation for {channel} channel")
-
-    project, project_version = channel.split("-", 1)
-    logger.debug(f"get_last_evaluation: project='{project}'")
-    logger.debug(f"get_last_evaluation: project_version='{project_version}'")
-
-    bucket = "nix-releases"
-    prefix = f"{project}/{project_version}/"
-    logger.debug(
-        f"get_last_evaluation: list all evaluation in '{bucket}' bucker under '{prefix}' prefix"
-    )
+def get_last_evaluation(prefix):
+    logger.debug(f"Retriving last evaluation for {prefix} prefix.")
+
     s3 = boto3.client(
         "s3", config=botocore.client.Config(signature_version=botocore.UNSIGNED)
     )
-    s3_result = s3.list_objects(Bucket=bucket, Prefix=prefix, Delimiter="/",)
+    s3_result = s3.list_objects(Bucket=S3_BUCKET, Prefix=prefix, Delimiter="/",)
     evaluations = []
     for item in s3_result.get("CommonPrefixes"):
-        if not item:
-            continue
         logger.debug(f"get_last_evaluation: evaluation in raw {item}")
-        prefix = item.get("Prefix")
-        evaluation = prefix[len(f"{project}/{project_version}/{channel}") :]
-        if evaluation.startswith("beta"):
-            evaluation = evaluation[len("beta") :]
         try:
-            revisions_since_start, git_revision = (
-                evaluation.lstrip(".").rstrip("/").split(".")
-            )
-        except Exception as e:  # noqa
-            continue
+            revisions_since_start, git_revision = item['Prefix'][len(prefix):].rstrip('/').split('.')
+        except:
+            __import__('pdb').set_trace()
         evaluation = {
             "revisions_since_start": int(revisions_since_start),
             "git_revision": git_revision,
-            "prefix": prefix,
+            "prefix": item['Prefix'].rstrip('/'),
         }
         logger.debug(f"get_last_evaluation: evaluation {evaluation}")
         evaluations.append(evaluation)
 
     logger.debug(
-        f"get_last_evaluation: {len(evaluations)} evaluations found for {channel} channel"
+        f"get_last_evaluation: {len(evaluations)} evaluations found for {prefix} prefix"
     )
     evaluations = sorted(evaluations, key=lambda i: i["revisions_since_start"])
 
     evaluation = evaluations[-1]
 
-    result = s3.get_object(Bucket=bucket, Key=f"{evaluation['prefix']}src-url")
+    result = s3.get_object(Bucket=S3_BUCKET, Key=f"{evaluation['prefix']}/src-url")
+    evaluation['id'] = result.get("Body").read().decode()[len("https://hydra.nixos.org/eval/"):]
 
     logger.debug(f"get_last_evaluation: last evaluation is: {evaluation}")
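get_last_evaluation now takes the S3 prefix directly and derives each evaluation's revision count and git revision from the part of the listed key that follows the prefix, then reads src-url to recover the Hydra evaluation id. A small illustration of the parsing with a made-up key (the real keys come from listing the nix-releases bucket):

# Illustration only; the listed key below is invented to show the split.
prefix = "nixos/20.03/nixos-20.03."
item_prefix = "nixos/20.03/nixos-20.03.2229.04b29ecfbed/"
revisions_since_start, git_revision = item_prefix[len(prefix):].rstrip('/').split('.')
# revisions_since_start == "2229", git_revision == "04b29ecfbed"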
@@ -250,10 +255,12 @@ def get_evaluation_builds(evaluation_id):
     with open(filename) as f:
         builds = json.loads(f.read())
 
-    return {
-        f"{build['nixname']}.{build['system']}": build
-        for build in builds
-    }
+    result = {}
+    for build in builds:
+        result.setdefault(build['nixname'], {})
+        result[build['nixname']][build['system']] = build
+
+    return result
 
 
 def get_packages(evaluation, evaluation_builds):
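Instead of a flat dict keyed by "<nixname>.<system>", the builds are now grouped by package name first and platform second, so a single lookup by name yields every platform's build. A runnable sketch of the new shape (the build records are invented, with only the fields needed here):

# Sketch of the grouping; real records carry the full Hydra build export.
builds = [
    {"nixname": "hello-2.10", "system": "x86_64-linux", "id": 1},
    {"nixname": "hello-2.10", "system": "aarch64-linux", "id": 2},
]
result = {}
for build in builds:
    result.setdefault(build["nixname"], {})
    result[build["nixname"]][build["system"]] = build
assert result["hello-2.10"]["aarch64-linux"]["id"] == 2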
@@ -315,38 +322,30 @@ def get_packages(evaluation, evaluation_builds):
     ):
         attr_set = None
 
-        hydra_build_id = None
-        hydra_build_status = None
-        hydra_job = None
-        hydra_jobset = None
-        hydra_path = None
-        hydra_drvpath = None
-        build_key = f"{data['name']}.{data['system']}"
-        if build_key in evaluation_builds:
-            build = evaluation_builds[build_key]
-            hydra_build_id = build['id']
-            hydra_build_status = build['buildstatus']
-            hydra_project = build['project']
-            hydra_job = build['job']
-            hydra_jobset = build['jobset']
-            hydra_path = [
-                {
-                    "output": output,
-                    "path": item['path'],
-                }
-                for output, item in build['buildoutputs'].items()
-            ]
-            hydra_drvpath = build['drvpath']
+        hydra = None
+        if data['name'] in evaluation_builds:
+            hydra = []
+            for platform, build in evaluation_builds[data['name']].items():
+                hydra.append({
+                    "build_id": build['id'],
+                    "build_status": build['buildstatus'],
+                    "platform": build['system'],
+                    "project": build['project'],
+                    "jobset": build['jobset'],
+                    "job": build['job'],
+                    "path": [
+                        {
+                            "output": output,
+                            "path": item['path'],
+                        }
+                        for output, item in build['buildoutputs'].items()
+                    ],
+                    "drv_path": build['drvpath'],
+                })
 
         yield dict(
             type="package",
-            package_hydra_build_id=hydra_build_id,
-            package_hydra_build_status=hydra_build_status,
-            package_hydra_project=hydra_project,
-            package_hydra_job=hydra_job,
-            package_hydra_jobset=hydra_jobset,
-            package_hydra_path=hydra_path,
-            package_hydra_drvpath=hydra_drvpath,
+            package_hydra=hydra,
             package_attr_name=attr_name,
             package_attr_set=attr_set,
             package_pname=data["pname"],
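Each indexed package now carries a single nested package_hydra list with one entry per platform that Hydra built, instead of seven flat package_hydra_* fields that could only describe one build. The entries it yields look roughly like this (every value below is illustrative, not real data):

# Illustrative document fragment; ids, names and store paths are made up.
package_hydra = [
    {
        "build_id": 118123456,
        "build_status": 0,
        "platform": "x86_64-linux",
        "project": "nixpkgs",
        "jobset": "nixpkgs-19.09pre",
        "job": "hello.x86_64-linux",
        "path": [{"output": "out", "path": "/nix/store/aaaaaaaa-hello-2.10"}],
        "drv_path": "/nix/store/bbbbbbbb-hello-2.10.drv",
    },
]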
@@ -403,10 +402,14 @@ def get_options(evaluation):
     return len(options), gen
 
 
-def ensure_index(es, index, mapping):
+def ensure_index(es, index, mapping, force=False):
     if es.indices.exists(index):
         logger.debug(f"ensure_index: index '{index}' already exists")
-        return False
+        if not force:
+            return False
+
+        logger.debug(f"ensure_index: Deleting index '{index}'")
+        es.indices.delete(index)
 
     es.indices.create(
         index=index,
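With force=False the function keeps its old behaviour and leaves an existing index untouched; with force=True it deletes the index and falls through to es.indices.create, so the caller gets True back and the data is re-imported. A hedged usage sketch, assuming es is the elasticsearch.Elasticsearch client created in main below and using an example index name:

# Sketch: recreate the per-evaluation index even if it already exists.
recreated = ensure_index(es, "evaluation-5-20.03-example", MAPPING, force=True)
if recreated:
    print("index dropped and recreated, ready for a fresh import")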
@@ -420,10 +423,18 @@ def ensure_index(es, index, mapping):
     return True
 
 
-def create_index_name(channel, evaluation):
+def create_index_name(channel, evaluation_packages, evaluation_options):
+    evaluation_name = '-'.join([
+        evaluation_packages['id'],
+        str(evaluation_packages['revisions_since_start']),
+        evaluation_packages['git_revision'],
+        evaluation_options['id'],
+        str(evaluation_options['revisions_since_start']),
+        evaluation_options['git_revision'],
+    ])
     return (
         f"latest-{INDEX_SCHEMA_VERSION}-{channel}",
-        f"evaluation-{INDEX_SCHEMA_VERSION}-{channel}-{evaluation['id']}-{evaluation['revisions_since_start']}-{evaluation['git_revision']}",
+        f"evaluation-{INDEX_SCHEMA_VERSION}-{channel}-{evaluation_name}",
    )
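Because packages and options now come from two separate evaluations, the evaluation index name encodes both of them, while the alias stays one-per-channel. Roughly (the evaluation ids and revisions below are invented):

# Sketch of the names produced, using made-up evaluation data.
evaluation_packages = {"id": "1565370", "revisions_since_start": 2229, "git_revision": "04b29ec"}
evaluation_options = {"id": "1565371", "revisions_since_start": 1104, "git_revision": "8d05772"}
alias_name, index_name = create_index_name("20.03", evaluation_packages, evaluation_options)
# alias_name == "latest-5-20.03"
# index_name == "evaluation-5-20.03-1565370-2229-04b29ec-1565371-1104-8d05772"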
@@ -464,10 +475,11 @@ def write(unit, es, index_name, number_of_items, item_generator):
 
 
 @click.command()
-@click.option("-u", "--es-url", help="Elasticsearch connection url")
-@click.option("-c", "--channel", help="NixOS channel name")
+@click.option("-u", "--es-url", help="Elasticsearch connection url.")
+@click.option("-c", "--channel", type=click.Choice(CHANNELS.keys()), help="Channel.")
+@click.option("-f", "--force", is_flag=True, help="Force channel recreation.")
 @click.option("-v", "--verbose", count=True)
-def main(es_url, channel, verbose):
+def main(es_url, channel, force, verbose):
 
     logging_level = "CRITICAL"
     if verbose == 1:
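The channel option is now validated against the CHANNELS table, and the new --force flag is threaded through to ensure_index. One hedged way to exercise the flag from a test, assuming the script is importable as a module named import_channel (the module name and URL are assumptions, not part of the commit):

# Sketch using click's test runner.
from click.testing import CliRunner

from import_channel import main

runner = CliRunner()
result = runner.invoke(
    main, ["--es-url", "http://localhost:9200", "--channel", "20.03", "--force", "-vv"]
)
print(result.exit_code)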
@@ -479,18 +491,18 @@ def main(es_url, channel, verbose):
     logger.debug(f"Verbosity is {verbose}")
     logger.debug(f"Logging set to {logging_level}")
 
-    evaluation = get_last_evaluation(channel)
-    evaluation_builds = get_evaluation_builds(evaluation['id'])
+    evaluation_packages = get_last_evaluation(CHANNELS[channel]['packages'])
+    evaluation_options = get_last_evaluation(CHANNELS[channel]['options'])
+    evaluation_packages_builds = get_evaluation_builds(evaluation_packages['id'])
 
     es = elasticsearch.Elasticsearch([es_url])
 
     # ensure indexes exist
-    alias_name, index_name = create_index_name(channel, evaluation)
-    index_created = ensure_index(es, index_name, MAPPING)
+    alias_name, index_name = create_index_name(channel, evaluation_packages, evaluation_options)
+    index_created = ensure_index(es, index_name, MAPPING, force)
 
     if index_created:
-        write("packages", es, index_name, *get_packages(evaluation, evaluation_builds))
-        write("options", es, index_name, *get_options(evaluation))
+        write("packages", es, index_name, *get_packages(evaluation_packages, evaluation_packages_builds))
+        write("options", es, index_name, *get_options(evaluation_options))
 
     update_alias(es, alias_name, index_name)
@@ -376,7 +376,7 @@ makeRequest :
 makeRequest options channel query from size =
     Search.makeRequest
         (makeRequestBody query from size)
-        ("latest-" ++ String.fromInt options.mappingSchemaVersion ++ "-nixos-" ++ channel)
+        ("latest-" ++ String.fromInt options.mappingSchemaVersion ++ "-" ++ channel)
         decodeResultItemSource
         options
         query
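The remaining hunks touch the Elm frontend and the JS entry point. Here the index alias the frontend queries drops the hard-coded "nixos-" infix so that it lines up with what create_index_name publishes. In Python terms (version and channel are illustrative):

# The alias requested by the frontend now matches the backend alias.
mapping_schema_version = 5
channel = "20.03"
old_alias = f"latest-{mapping_schema_version}-nixos-{channel}"  # form requested before this change
new_alias = f"latest-{mapping_schema_version}-{channel}"        # matches create_index_name above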
@@ -65,6 +65,7 @@ type alias ResultItemSource =
     , position : Maybe String
     , homepage : Maybe String
     , system : String
+    , hydra : Maybe (List ResultPackageHydra)
     }
@@ -81,6 +82,24 @@ type alias ResultPackageMaintainer =
     }
 
 
+type alias ResultPackageHydra =
+    { build_id : Int
+    , build_status : Int
+    , platform : String
+    , project : String
+    , jobset : String
+    , job : String
+    , path : List ResultPackageHydraPath
+    , drv_path : String
+    }
+
+
+type alias ResultPackageHydraPath =
+    { output : String
+    , path : String
+    }
+
+
 init :
     Maybe String
     -> Maybe String
@@ -221,7 +240,7 @@ viewResultItemDetails channel item =
         Just value ->
             wrapWith value
 
-        allowedPlatforms platform =
+        mainPlatforms platform =
             List.member platform
                 [ "x86_64-linux"
                 , "aarch64-linux"
@@ -229,22 +248,42 @@ viewResultItemDetails channel item =
                 , "i686-linux"
                 ]
 
-        showPlatforms platforms =
-            platforms
-                |> List.filter allowedPlatforms
-                |> List.map showPlatform
+        getHydraDetailsForPlatform hydra platform =
+            hydra
+                |> Maybe.andThen
+                    (\hydras ->
+                        hydras
+                            |> List.filter (\x -> x.platform == platform)
+                            |> List.head
+                    )
 
-        showPlatform platform =
+        showPlatforms hydra platforms =
+            platforms
+                |> List.filter mainPlatforms
+                |> List.map (showPlatform hydra)
+
+        showPlatform hydra platform =
             li []
-                [ case Search.channelDetailsFromId channel of
-                    Just channelDetails ->
+                [ case
+                    ( getHydraDetailsForPlatform hydra platform
+                    , Search.channelDetailsFromId channel
+                    )
+                  of
+                    ( Just hydraDetails, _ ) ->
                         a
-                            [ href <| "https://hydra.nixos.org/job/" ++ channelDetails.jobset ++ "/nixpkgs." ++ item.source.attr_name ++ "." ++ item.source.system
+                            [ href <| "https://hydra.nixos.org/build/" ++ String.fromInt hydraDetails.build_id
                             ]
                             [ text platform
                             ]
 
-                    Nothing ->
+                    ( Nothing, Just channelDetails ) ->
+                        a
+                            [ href <| "https://hydra.nixos.org/job/" ++ channelDetails.jobset ++ "/nixpkgs." ++ item.source.attr_name ++ "." ++ platform
+                            ]
+                            [ text platform
+                            ]
+
+                    ( _, _ ) ->
                         text platform
                 ]
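This is the heart of #81: for each platform, prefer a direct link to the Hydra build when the index has one, otherwise fall back to the channel's job page (now built from the listed platform rather than item.source.system), and render plain text when neither is known. The same decision expressed as a small Python helper (platform_link is purely illustrative, not part of the commit):

# Hypothetical helper mirroring the Elm case expression above.
def platform_link(hydra_builds, channel_jobset, attr_name, platform):
    build = next((b for b in (hydra_builds or []) if b["platform"] == platform), None)
    if build is not None:
        return f"https://hydra.nixos.org/build/{build['build_id']}"
    if channel_jobset is not None:
        return f"https://hydra.nixos.org/job/{channel_jobset}/nixpkgs.{attr_name}.{platform}"
    return None  # no link, just show the platform name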
@@ -284,7 +323,7 @@ viewResultItemDetails channel item =
         , dt [] [ text <| "Nix expression" ]
         , dd [] [ withDefault asGithubLink item.source.position ]
         , dt [] [ text "Platforms" ]
-        , dd [] [ ul [ class "inline" ] <| showPlatforms item.source.platforms ]
+        , dd [] [ ul [ class "inline" ] <| showPlatforms item.source.hydra item.source.platforms ]
         , dt [] [ text "Homepage" ]
         , dd [] [ withDefault asLink item.source.homepage ]
         , dt [] [ text "Licenses" ]
@@ -437,7 +476,7 @@ makeRequest :
 makeRequest options channel query from size =
     Search.makeRequest
         (makeRequestBody query from size)
-        ("latest-" ++ String.fromInt options.mappingSchemaVersion ++ "-nixos-" ++ channel)
+        ("latest-" ++ String.fromInt options.mappingSchemaVersion ++ "-" ++ channel)
         decodeResultItemSource
         options
         query
@@ -464,6 +503,7 @@ decodeResultItemSource =
         |> Json.Decode.Pipeline.required "package_position" (Json.Decode.nullable Json.Decode.string)
         |> Json.Decode.Pipeline.required "package_homepage" (Json.Decode.nullable Json.Decode.string)
         |> Json.Decode.Pipeline.required "package_system" Json.Decode.string
+        |> Json.Decode.Pipeline.required "package_hydra" (Json.Decode.nullable (Json.Decode.list decodeResultPackageHydra))
 
 
 decodeResultPackageLicense : Json.Decode.Decoder ResultPackageLicense
@@ -479,3 +519,24 @@ decodeResultPackageMaintainer =
         (Json.Decode.field "name" Json.Decode.string)
         (Json.Decode.field "email" Json.Decode.string)
         (Json.Decode.field "github" (Json.Decode.nullable Json.Decode.string))
+
+
+decodeResultPackageHydra : Json.Decode.Decoder ResultPackageHydra
+decodeResultPackageHydra =
+    Json.Decode.succeed ResultPackageHydra
+        |> Json.Decode.Pipeline.required "build_id" Json.Decode.int
+        |> Json.Decode.Pipeline.required "build_status" Json.Decode.int
+        |> Json.Decode.Pipeline.required "platform" Json.Decode.string
+        |> Json.Decode.Pipeline.required "project" Json.Decode.string
+        |> Json.Decode.Pipeline.required "jobset" Json.Decode.string
+        |> Json.Decode.Pipeline.required "job" Json.Decode.string
+        |> Json.Decode.Pipeline.required "path" (Json.Decode.list decodeResultPackageHydraPath)
+        |> Json.Decode.Pipeline.required "drv_path" Json.Decode.string
+
+
+decodeResultPackageHydraPath : Json.Decode.Decoder ResultPackageHydraPath
+decodeResultPackageHydraPath =
+    Json.Decode.map2 ResultPackageHydraPath
+        (Json.Decode.field "output" Json.Decode.string)
+        (Json.Decode.field "path" Json.Decode.string)
@@ -6,7 +6,7 @@ const {Elm} = require('./Main');
 
 Elm.Main.init({
   flags: {
-    elasticsearchMappingSchemaVersion: process.env.ELASTICSEARCH_MAPPING_SCHEMA_VERSION || 4,
+    elasticsearchMappingSchemaVersion: process.env.ELASTICSEARCH_MAPPING_SCHEMA_VERSION || 5,
     elasticsearchUrl: process.env.ELASTICSEARCH_URL || 'https://nixos-search-5886075189.us-east-1.bonsaisearch.net:443',
     elasticsearchUsername : process.env.ELASTICSEARCH_USERNAME || 'z3ZFJ6y2mR',
     elasticsearchPassword : process.env.ELASTICSEARCH_PASSWORD || 'ds8CEvALPf9pui7XG'